diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index 7c42f6dc2..4343d612b 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -e7b127cb07af8dd4d8c61c7cc045c8910cdbb02a \ No newline at end of file +22f09783eb8a84d52026f856be3b2068f9498db3 \ No newline at end of file diff --git a/.gitattributes b/.gitattributes index 438ff6f6d..2c2d21e9c 100755 --- a/.gitattributes +++ b/.gitattributes @@ -259,8 +259,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMetas databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMetastoreDeltaSharingScope.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateModelVersionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdatePermissions.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdatePredictiveOptimization.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdatePredictiveOptimizationResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateRegisteredModelRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateSchema.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateStorageCredential.java linguist-generated=true @@ -1189,6 +1187,10 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessLi databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListIpAccessListResponse.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNccAzurePrivateEndpointRulesResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkConnectivityConfigurationsRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNetworkConnectivityConfigurationsResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListPrivateEndpointRulesRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListTokenManagementRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListTokensResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListType.java linguist-generated=true diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java index e6b3f42a4..339704ee8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java @@ -998,7 +998,18 @@ public ServingEndpointsAPI servingEndpoints() { return servingEndpointsAPI; } - /** // TODO(yuyuan.tang) to add the description for the setting */ + /** + * The default namespace setting API allows users to configure the default namespace for a + * Databricks workspace. + * + *
Through this API, users can retrieve, set, or modify the default namespace used when queries + * do not reference a fully qualified three-level name. For example, if you use the API to set + * 'retail_prod' as the default catalog, then a query 'SELECT * FROM myTable' would reference the + * object 'retail_prod.default.myTable' (the schema 'default' is always assumed). + * + *
This setting requires a restart of clusters and SQL warehouses to take effect. Additionally, + * the default namespace only applies when using Unity Catalog-enabled compute. + */ public SettingsAPI settings() { return settingsAPI; } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresAPI.java index cd8c6e8a6..7e43d4347 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresAPI.java @@ -92,22 +92,6 @@ public void delete(DeleteMetastoreRequest request) { impl.delete(request); } - public UpdatePredictiveOptimizationResponse enableOptimization( - String metastoreId, boolean enable) { - return enableOptimization( - new UpdatePredictiveOptimization().setMetastoreId(metastoreId).setEnable(enable)); - } - - /** - * Toggle predictive optimization on the metastore. - * - *
Enables or disables predictive optimization on the metastore.
- */
- public UpdatePredictiveOptimizationResponse enableOptimization(
- UpdatePredictiveOptimization request) {
- return impl.enableOptimization(request);
- }
-
public MetastoreInfo get(String id) {
return get(new GetMetastoreRequest().setId(id));
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresImpl.java
index b533753bc..7fb32ce50 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresImpl.java
@@ -50,16 +50,6 @@ public void delete(DeleteMetastoreRequest request) {
apiClient.DELETE(path, request, Void.class, headers);
}
- @Override
- public UpdatePredictiveOptimizationResponse enableOptimization(
- UpdatePredictiveOptimization request) {
- String path = "/api/2.0/predictive-optimization/service";
- Map Enables or disables predictive optimization on the metastore.
- */
- UpdatePredictiveOptimizationResponse enableOptimization(
- UpdatePredictiveOptimization updatePredictiveOptimization);
-
/**
* Get a metastore.
*
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableConstraintList.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableConstraintList.java
index b725c599a..73a6dadaf 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableConstraintList.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableConstraintList.java
@@ -10,7 +10,9 @@
@Generated
public class TableConstraintList {
- /** List of table constraints. */
+ /**
+ * List of table constraints. Note: this field is not set in the output of the __listTables__ API.
+ */
@JsonProperty("table_constraints")
private Collection [Databricks Cluster Policy Definition Language]:
+ * https://docs.databricks.com/administration-guide/clusters/policy-definition.html
+ */
@JsonProperty("definition")
private String definition;
@@ -37,11 +42,14 @@ public class CreatePolicy {
private String name;
/**
- * Policy definition JSON document expressed in Databricks Policy Definition Language. The JSON
+ * Policy definition JSON document expressed in [Databricks Policy Definition Language]. The JSON
* document must be passed as a string and cannot be embedded in the requests.
*
* You can use this to customize the policy definition inherited from the policy family. Policy
* rules specified here are merged into the inherited policy definition.
+ *
+ * [Databricks Policy Definition Language]:
+ * https://docs.databricks.com/administration-guide/clusters/policy-definition.html
*/
@JsonProperty("policy_family_definition_overrides")
private String policyFamilyDefinitionOverrides;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditPolicy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditPolicy.java
index efc04bfe8..6dd048db9 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditPolicy.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditPolicy.java
@@ -10,7 +10,12 @@
@Generated
public class EditPolicy {
- /** Policy definition document expressed in Databricks Cluster Policy Definition Language. */
+ /**
+ * Policy definition document expressed in [Databricks Cluster Policy Definition Language].
+ *
+ * [Databricks Cluster Policy Definition Language]:
+ * https://docs.databricks.com/administration-guide/clusters/policy-definition.html
+ */
@JsonProperty("definition")
private String definition;
@@ -37,11 +42,14 @@ public class EditPolicy {
private String name;
/**
- * Policy definition JSON document expressed in Databricks Policy Definition Language. The JSON
+ * Policy definition JSON document expressed in [Databricks Policy Definition Language]. The JSON
* document must be passed as a string and cannot be embedded in the requests.
*
* You can use this to customize the policy definition inherited from the policy family. Policy
* rules specified here are merged into the inherited policy definition.
+ *
+ * [Databricks Policy Definition Language]:
+ * https://docs.databricks.com/administration-guide/clusters/policy-definition.html
*/
@JsonProperty("policy_family_definition_overrides")
private String policyFamilyDefinitionOverrides;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAccessControlRequest.java
index 0a56ec462..79c7449ad 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAccessControlRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAccessControlRequest.java
@@ -17,7 +17,10 @@ public class InstancePoolAccessControlRequest {
@JsonProperty("permission_level")
private InstancePoolPermissionLevel permissionLevel;
- /** name of the service principal */
+ /**
+ * Application ID of an active service principal. Setting this field requires the
+ * `servicePrincipal/user` role.
+ */
@JsonProperty("service_principal_name")
private String servicePrincipalName;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAwsAttributes.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAwsAttributes.java
index bd8d5545d..5858ba750 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAwsAttributes.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAwsAttributes.java
@@ -39,7 +39,7 @@ public class InstancePoolAwsAttributes {
* the Databricks deployment. For example, "us-west-2a" is not a valid zone id if the Databricks
* deployment resides in the "us-east-1" region. This is an optional field at cluster creation,
* and if not specified, a default zone will be used. The list of available zones as well as the
- * default value can be found by using the `List Zones`_ method.
+ * default value can be found by using the `List Zones` method.
*/
@JsonProperty("zone_id")
private String zoneId;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Policy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Policy.java
index b61c2e6d4..fff3fb236 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Policy.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Policy.java
@@ -21,7 +21,12 @@ public class Policy {
@JsonProperty("creator_user_name")
private String creatorUserName;
- /** Policy definition document expressed in Databricks Cluster Policy Definition Language. */
+ /**
+ * Policy definition document expressed in [Databricks Cluster Policy Definition Language].
+ *
+ * [Databricks Cluster Policy Definition Language]:
+ * https://docs.databricks.com/administration-guide/clusters/policy-definition.html
+ */
@JsonProperty("definition")
private String definition;
@@ -55,11 +60,14 @@ public class Policy {
private String name;
/**
- * Policy definition JSON document expressed in Databricks Policy Definition Language. The JSON
+ * Policy definition JSON document expressed in [Databricks Policy Definition Language]. The JSON
* document must be passed as a string and cannot be embedded in the requests.
*
* You can use this to customize the policy definition inherited from the policy family. Policy
* rules specified here are merged into the inherited policy definition.
+ *
+ * [Databricks Policy Definition Language]:
+ * https://docs.databricks.com/administration-guide/clusters/policy-definition.html
*/
@JsonProperty("policy_family_definition_overrides")
private String policyFamilyDefinitionOverrides;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyFamily.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyFamily.java
index 9676f19c3..c40033e13 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyFamily.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyFamily.java
@@ -9,7 +9,12 @@
@Generated
public class PolicyFamily {
- /** Policy definition document expressed in Databricks Cluster Policy Definition Language. */
+ /**
+ * Policy definition document expressed in [Databricks Cluster Policy Definition Language].
+ *
+ * [Databricks Cluster Policy Definition Language]:
+ * https://docs.databricks.com/administration-guide/clusters/policy-definition.html
+ */
@JsonProperty("definition")
private String definition;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccessControlRequest.java
index 59f3b9fc0..7db3e07d0 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccessControlRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccessControlRequest.java
@@ -17,7 +17,10 @@ public class AccessControlRequest {
@JsonProperty("permission_level")
private PermissionLevel permissionLevel;
- /** name of the service principal */
+ /**
+ * Application ID of an active service principal. Setting this field requires the
+ * `servicePrincipal/user` role.
+ */
@JsonProperty("service_principal_name")
private String servicePrincipalName;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountGroupsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountGroupsAPI.java
index 3d52179b1..be8ce4b90 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountGroupsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccountGroupsAPI.java
@@ -3,6 +3,7 @@
import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.Paginator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -72,7 +73,20 @@ public Group get(GetAccountGroupRequest request) {
* Gets all details of the groups associated with the Databricks account.
*/
public Iterable Gets the set of service principals associated with a Databricks account.
*/
public Iterable Gets details for all the users associated with a Databricks account.
*/
public Iterable Gets all details of the groups associated with the Databricks workspace.
*/
public Iterable Gets the set of service principals associated with a Databricks workspace.
*/
public Iterable Gets details for all the users associated with a Databricks workspace.
*/
public Iterable This value cannot exceed 1000\. Setting this value to `0` causes all new runs to be skipped.
+ * This value cannot exceed 1000. Setting this value to `0` causes all new runs to be skipped.
*/
@JsonProperty("max_concurrent_runs")
private Long maxConcurrentRuns;
@@ -199,6 +205,15 @@ public JobDeployment getDeployment() {
return deployment;
}
+ public CreateJob setDescription(String description) {
+ this.description = description;
+ return this;
+ }
+
+ public String getDescription() {
+ return description;
+ }
+
public CreateJob setEditMode(CreateJobEditMode editMode) {
this.editMode = editMode;
return this;
@@ -370,6 +385,7 @@ public boolean equals(Object o) {
&& Objects.equals(compute, that.compute)
&& Objects.equals(continuous, that.continuous)
&& Objects.equals(deployment, that.deployment)
+ && Objects.equals(description, that.description)
&& Objects.equals(editMode, that.editMode)
&& Objects.equals(emailNotifications, that.emailNotifications)
&& Objects.equals(format, that.format)
@@ -397,6 +413,7 @@ public int hashCode() {
compute,
continuous,
deployment,
+ description,
editMode,
emailNotifications,
format,
@@ -424,6 +441,7 @@ public String toString() {
.add("compute", compute)
.add("continuous", continuous)
.add("deployment", deployment)
+ .add("description", description)
.add("editMode", editMode)
.add("emailNotifications", emailNotifications)
.add("format", format)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobAccessControlRequest.java
index 7a60d9919..3bce29c82 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobAccessControlRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobAccessControlRequest.java
@@ -17,7 +17,10 @@ public class JobAccessControlRequest {
@JsonProperty("permission_level")
private JobPermissionLevel permissionLevel;
- /** name of the service principal */
+ /**
+ * Application ID of an active service principal. Setting this field requires the
+ * `servicePrincipal/user` role.
+ */
@JsonProperty("service_principal_name")
private String servicePrincipalName;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java
index 5ef99a058..72dda500d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java
@@ -26,6 +26,12 @@ public class JobSettings {
@JsonProperty("deployment")
private JobDeployment deployment;
+ /**
+ * An optional description for the job. The maximum length is 1024 characters in UTF-8 encoding.
+ */
+ @JsonProperty("description")
+ private String description;
+
/**
* Edit mode of the job.
*
@@ -88,7 +94,7 @@ public class JobSettings {
* active runs. However, from then on, new runs are skipped unless there are fewer than 3 active
* runs.
*
- * This value cannot exceed 1000\. Setting this value to `0` causes all new runs to be skipped.
+ * This value cannot exceed 1000. Setting this value to `0` causes all new runs to be skipped.
*/
@JsonProperty("max_concurrent_runs")
private Long maxConcurrentRuns;
@@ -185,6 +191,15 @@ public JobDeployment getDeployment() {
return deployment;
}
+ public JobSettings setDescription(String description) {
+ this.description = description;
+ return this;
+ }
+
+ public String getDescription() {
+ return description;
+ }
+
public JobSettings setEditMode(JobSettingsEditMode editMode) {
this.editMode = editMode;
return this;
@@ -355,6 +370,7 @@ public boolean equals(Object o) {
return Objects.equals(compute, that.compute)
&& Objects.equals(continuous, that.continuous)
&& Objects.equals(deployment, that.deployment)
+ && Objects.equals(description, that.description)
&& Objects.equals(editMode, that.editMode)
&& Objects.equals(emailNotifications, that.emailNotifications)
&& Objects.equals(format, that.format)
@@ -381,6 +397,7 @@ public int hashCode() {
compute,
continuous,
deployment,
+ description,
editMode,
emailNotifications,
format,
@@ -407,6 +424,7 @@ public String toString() {
.add("compute", compute)
.add("continuous", continuous)
.add("deployment", deployment)
+ .add("description", description)
.add("editMode", editMode)
.add("emailNotifications", emailNotifications)
.add("format", format)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentAccessControlRequest.java
index 700c97ed1..906deda04 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentAccessControlRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentAccessControlRequest.java
@@ -17,7 +17,10 @@ public class ExperimentAccessControlRequest {
@JsonProperty("permission_level")
private ExperimentPermissionLevel permissionLevel;
- /** name of the service principal */
+ /**
+ * Application ID of an active service principal. Setting this field requires the
+ * `servicePrincipal/user` role.
+ */
@JsonProperty("service_principal_name")
private String servicePrincipalName;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelAccessControlRequest.java
index 22f47014e..bc6ba6c45 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelAccessControlRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelAccessControlRequest.java
@@ -17,7 +17,10 @@ public class RegisteredModelAccessControlRequest {
@JsonProperty("permission_level")
private RegisteredModelPermissionLevel permissionLevel;
- /** name of the service principal */
+ /**
+ * Application ID of an active service principal. Setting this field requires the
+ * `servicePrincipal/user` role.
+ */
@JsonProperty("service_principal_name")
private String servicePrincipalName;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineAccessControlRequest.java
index aa7160397..1f421fb13 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineAccessControlRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineAccessControlRequest.java
@@ -17,7 +17,10 @@ public class PipelineAccessControlRequest {
@JsonProperty("permission_level")
private PipelinePermissionLevel permissionLevel;
- /** name of the service principal */
+ /**
+ * Application ID of an active service principal. Setting this field requires the
+ * `servicePrincipal/user` role.
+ */
@JsonProperty("service_principal_name")
private String servicePrincipalName;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointAccessControlRequest.java
index ac4bb3b81..63893df68 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointAccessControlRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointAccessControlRequest.java
@@ -17,7 +17,10 @@ public class ServingEndpointAccessControlRequest {
@JsonProperty("permission_level")
private ServingEndpointPermissionLevel permissionLevel;
- /** name of the service principal */
+ /**
+ * Application ID of an active service principal. Setting this field requires the
+ * `servicePrincipal/user` role.
+ */
@JsonProperty("service_principal_name")
private String servicePrincipalName;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultNamespaceSetting.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultNamespaceSetting.java
index f0ad5974e..eb12d3fd3 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultNamespaceSetting.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultNamespaceSetting.java
@@ -7,7 +7,15 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
-/** Default namespace setting. */
+/**
+ * This represents the setting configuration for the default namespace in the Databricks workspace.
+ * Setting the default catalog for the workspace determines the catalog that is used when queries do
+ * not reference a fully qualified 3 level name. For example, if the default catalog is set to
+ * 'retail_prod' then a query 'SELECT * FROM myTable' would reference the object
+ * 'retail_prod.default.myTable' (the schema 'default' is always assumed). This setting requires a
+ * restart of clusters and SQL warehouses to take effect. Additionally, the default namespace only
+ * applies when using Unity Catalog-enabled compute.
+ */
@Generated
public class DefaultNamespaceSetting {
/**
@@ -28,7 +36,8 @@ public class DefaultNamespaceSetting {
/**
* Name of the corresponding setting. This field is populated in the response, but it will not be
* respected even if it's set in the request body. The setting name in the path parameter will be
- * respected instead.
+ * respected instead. Setting name is required to be 'default' if the setting only has one
+ * instance per workspace.
*/
@JsonProperty("setting_name")
private String settingName;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDefaultWorkspaceNamespaceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDefaultWorkspaceNamespaceRequest.java
index 2356849af..6580a24bd 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDefaultWorkspaceNamespaceRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDefaultWorkspaceNamespaceRequest.java
@@ -7,7 +7,7 @@
import com.databricks.sdk.support.ToStringer;
import java.util.Objects;
-/** Delete the default namespace */
+/** Delete the default namespace setting */
@Generated
public class DeleteDefaultWorkspaceNamespaceRequest {
/**
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNccAzurePrivateEndpointRulesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNccAzurePrivateEndpointRulesResponse.java
new file mode 100755
index 000000000..23094c9fa
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListNccAzurePrivateEndpointRulesResponse.java
@@ -0,0 +1,63 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class ListNccAzurePrivateEndpointRulesResponse {
+ /** */
+ @JsonProperty("items")
+ private Collection Gets an array of network connectivity configurations.
+ */
+ public Iterable Gets an array of private endpoint rules.
+ */
+ public Iterable Gets an array of network connectivity configurations.
+ */
+ ListNetworkConnectivityConfigurationsResponse listNetworkConnectivityConfigurations(
+ ListNetworkConnectivityConfigurationsRequest listNetworkConnectivityConfigurationsRequest);
+
+ /**
+ * List private endpoint rules.
+ *
+ * Gets an array of private endpoint rules.
+ */
+ ListNccAzurePrivateEndpointRulesResponse listPrivateEndpointRules(
+ ListPrivateEndpointRulesRequest listPrivateEndpointRulesRequest);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReadDefaultWorkspaceNamespaceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReadDefaultWorkspaceNamespaceRequest.java
index ba2da921a..80069946d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReadDefaultWorkspaceNamespaceRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReadDefaultWorkspaceNamespaceRequest.java
@@ -7,7 +7,7 @@
import com.databricks.sdk.support.ToStringer;
import java.util.Objects;
-/** Get the default namespace */
+/** Get the default namespace setting */
@Generated
public class ReadDefaultWorkspaceNamespaceRequest {
/**
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java
index cf0073d7d..3bfc62016 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java
@@ -6,7 +6,18 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-/** // TODO(yuyuan.tang) to add the description for the setting */
+/**
+ * The default namespace setting API allows users to configure the default namespace for a
+ * Databricks workspace.
+ *
+ * Through this API, users can retrieve, set, or modify the default namespace used when queries
+ * do not reference a fully qualified three-level name. For example, if you use the API to set
+ * 'retail_prod' as the default catalog, then a query 'SELECT * FROM myTable' would reference the
+ * object 'retail_prod.default.myTable' (the schema 'default' is always assumed).
+ *
+ * This setting requires a restart of clusters and SQL warehouses to take effect. Additionally,
+ * the default namespace only applies when using Unity Catalog-enabled compute.
+ */
@Generated
public class SettingsAPI {
private static final Logger LOG = LoggerFactory.getLogger(SettingsAPI.class);
@@ -29,9 +40,12 @@ public DeleteDefaultWorkspaceNamespaceResponse deleteDefaultWorkspaceNamespace(S
}
/**
- * Delete the default namespace.
+ * Delete the default namespace setting.
*
- * Deletes the default namespace.
+ * Deletes the default namespace setting for the workspace. A fresh etag needs to be provided
+ * in DELETE requests (as a query parameter). The etag can be retrieved by making a GET request
+ * before the DELETE request. If the setting is updated/deleted concurrently, DELETE will fail
+ * with 409 and the request will need to be retried by using the fresh etag in the 409 response.
*/
public DeleteDefaultWorkspaceNamespaceResponse deleteDefaultWorkspaceNamespace(
DeleteDefaultWorkspaceNamespaceRequest request) {
@@ -43,9 +57,9 @@ public DefaultNamespaceSetting readDefaultWorkspaceNamespace(String etag) {
}
/**
- * Get the default namespace.
+ * Get the default namespace setting.
*
- * Gets the default namespace.
+ * Gets the default namespace setting.
*/
public DefaultNamespaceSetting readDefaultWorkspaceNamespace(
ReadDefaultWorkspaceNamespaceRequest request) {
@@ -53,13 +67,14 @@ public DefaultNamespaceSetting readDefaultWorkspaceNamespace(
}
/**
- * Updates the default namespace setting.
+ * Update the default namespace setting.
*
* Updates the default namespace setting for the workspace. A fresh etag needs to be provided
- * in PATCH requests (as part the setting field). The etag can be retrieved by making a GET
+ * in PATCH requests (as part of the setting field). The etag can be retrieved by making a GET
* request before the PATCH request. Note that if the setting does not exist, GET will return a
* NOT_FOUND error and the etag will be present in the error response, which should be set in the
- * PATCH request.
+ * PATCH request. If the setting is updated concurrently, PATCH will fail with 409 and the request
+ * will need to be retried by using the fresh etag in the 409 response.
*/
public DefaultNamespaceSetting updateDefaultWorkspaceNamespace(
UpdateDefaultWorkspaceNamespaceRequest request) {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsService.java
index 4775132d8..bfc401bae 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsService.java
@@ -4,7 +4,16 @@
import com.databricks.sdk.support.Generated;
/**
- * // TODO(yuyuan.tang) to add the description for the setting
+ * The default namespace setting API allows users to configure the default namespace for a
+ * Databricks workspace.
+ *
+ * Through this API, users can retrieve, set, or modify the default namespace used when queries
+ * do not reference a fully qualified three-level name. For example, if you use the API to set
+ * 'retail_prod' as the default catalog, then a query 'SELECT * FROM myTable' would reference the
+ * object 'retail_prod.default.myTable' (the schema 'default' is always assumed).
+ *
+ * This setting requires a restart of clusters and SQL warehouses to take effect. Additionally,
+ * the default namespace only applies when using Unity Catalog-enabled compute.
*
* This is the high-level interface, that contains generated methods.
*
@@ -13,29 +22,33 @@
@Generated
public interface SettingsService {
/**
- * Delete the default namespace.
+ * Delete the default namespace setting.
*
- * Deletes the default namespace.
+ * Deletes the default namespace setting for the workspace. A fresh etag needs to be provided
+ * in DELETE requests (as a query parameter). The etag can be retrieved by making a GET request
+ * before the DELETE request. If the setting is updated/deleted concurrently, DELETE will fail
+ * with 409 and the request will need to be retried by using the fresh etag in the 409 response.
*/
DeleteDefaultWorkspaceNamespaceResponse deleteDefaultWorkspaceNamespace(
DeleteDefaultWorkspaceNamespaceRequest deleteDefaultWorkspaceNamespaceRequest);
/**
- * Get the default namespace.
+ * Get the default namespace setting.
*
- * Gets the default namespace.
+ * Gets the default namespace setting.
*/
DefaultNamespaceSetting readDefaultWorkspaceNamespace(
ReadDefaultWorkspaceNamespaceRequest readDefaultWorkspaceNamespaceRequest);
/**
- * Updates the default namespace setting.
+ * Update the default namespace setting.
*
* Updates the default namespace setting for the workspace. A fresh etag needs to be provided
- * in PATCH requests (as part the setting field). The etag can be retrieved by making a GET
+ * in PATCH requests (as part of the setting field). The etag can be retrieved by making a GET
* request before the PATCH request. Note that if the setting does not exist, GET will return a
* NOT_FOUND error and the etag will be present in the error response, which should be set in the
- * PATCH request.
+ * PATCH request. If the setting is updated concurrently, PATCH will fail with 409 and the request
+ * will need to be retried by using the fresh etag in the 409 response.
*/
DefaultNamespaceSetting updateDefaultWorkspaceNamespace(
UpdateDefaultWorkspaceNamespaceRequest updateDefaultWorkspaceNamespaceRequest);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenAccessControlRequest.java
index 93992ed80..baf5d6388 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenAccessControlRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenAccessControlRequest.java
@@ -17,7 +17,10 @@ public class TokenAccessControlRequest {
@JsonProperty("permission_level")
private TokenPermissionLevel permissionLevel;
- /** name of the service principal */
+ /**
+ * Application ID of an active service principal. Setting this field requires the
+ * `servicePrincipal/user` role.
+ */
@JsonProperty("service_principal_name")
private String servicePrincipalName;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDefaultWorkspaceNamespaceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDefaultWorkspaceNamespaceRequest.java
index d3128a794..5b104f7ec 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDefaultWorkspaceNamespaceRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDefaultWorkspaceNamespaceRequest.java
@@ -7,25 +7,33 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
-/** Updates the default namespace setting */
+/** Update the default namespace setting */
@Generated
public class UpdateDefaultWorkspaceNamespaceRequest {
- /** This should always be set to true for Settings RPCs. Added for AIP compliance. */
+ /** This should always be set to true for Settings API. Added for AIP compliance. */
@JsonProperty("allow_missing")
private Boolean allowMissing;
/**
- * Field mask required to be passed into the PATCH request. Field mask specifies which fields of
- * the setting payload will be updated. For example, for Default Namespace setting, the field mask
- * is supposed to contain fields from the DefaultNamespaceSetting.namespace schema.
+ * Field mask is required to be passed into the PATCH request. Field mask specifies which fields
+ * of the setting payload will be updated. For example, for Default Namespace setting, the field
+ * mask is supposed to contain fields from the DefaultNamespaceSetting.namespace schema.
*
- * The field mask needs to supplied as single string. To specify multiple fields in the field
- * mask, use comma as the seperator (no space).
+ * The field mask needs to be supplied as a single string. To specify multiple fields in the
+ * field mask, use comma as the separator (no space).
*/
@JsonProperty("field_mask")
private String fieldMask;
- /** Default namespace setting. */
+ /**
+ * This represents the setting configuration for the default namespace in the Databricks
+ * workspace. Setting the default catalog for the workspace determines the catalog that is used
+ * when queries do not reference a fully qualified 3 level name. For example, if the default
+ * catalog is set to 'retail_prod' then a query 'SELECT * FROM myTable' would reference the object
+ * 'retail_prod.default.myTable' (the schema 'default' is always assumed). This setting requires a
+ * restart of clusters and SQL warehouses to take effect. Additionally, the default namespace only
+ * applies when using Unity Catalog-enabled compute.
+ */
@JsonProperty("setting")
private DefaultNamespaceSetting setting;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObject.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObject.java
index 645815abb..381139da8 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObject.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObject.java
@@ -74,6 +74,15 @@ public class SharedDataObject {
@JsonProperty("status")
private SharedDataObjectStatus status;
+ /**
+ * A user-provided new name for the data object within the share. If this new name is not
+ * provided, the object's original name will be used as the `string_shared_as` name. The
+ * `string_shared_as` name must be unique within a share. For notebooks, the new name should be
+ * the new notebook file name.
+ */
+ @JsonProperty("string_shared_as")
+ private String stringSharedAs;
+
public SharedDataObject setAddedAt(Long addedAt) {
this.addedAt = addedAt;
return this;
@@ -174,6 +183,15 @@ public SharedDataObjectStatus getStatus() {
return status;
}
+ public SharedDataObject setStringSharedAs(String stringSharedAs) {
+ this.stringSharedAs = stringSharedAs;
+ return this;
+ }
+
+ public String getStringSharedAs() {
+ return stringSharedAs;
+ }
+
@Override
public boolean equals(Object o) {
if (this == o) return true;
@@ -189,7 +207,8 @@ public boolean equals(Object o) {
&& Objects.equals(partitions, that.partitions)
&& Objects.equals(sharedAs, that.sharedAs)
&& Objects.equals(startVersion, that.startVersion)
- && Objects.equals(status, that.status);
+ && Objects.equals(status, that.status)
+ && Objects.equals(stringSharedAs, that.stringSharedAs);
}
@Override
@@ -205,7 +224,8 @@ public int hashCode() {
partitions,
sharedAs,
startVersion,
- status);
+ status,
+ stringSharedAs);
}
@Override
@@ -222,6 +242,7 @@ public String toString() {
.add("sharedAs", sharedAs)
.add("startVersion", startVersion)
.add("status", status)
+ .add("stringSharedAs", stringSharedAs)
.toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehouseAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehouseAccessControlRequest.java
index 261715dde..594238f8c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehouseAccessControlRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehouseAccessControlRequest.java
@@ -17,7 +17,10 @@ public class WarehouseAccessControlRequest {
@JsonProperty("permission_level")
private WarehousePermissionLevel permissionLevel;
- /** name of the service principal */
+ /**
+ * Application ID of an active service principal. Setting this field requires the
+ * `servicePrincipal/user` role.
+ */
@JsonProperty("service_principal_name")
private String servicePrincipalName;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportFormat.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportFormat.java
index 947d58587..07720c444 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportFormat.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportFormat.java
@@ -6,6 +6,7 @@
@Generated
public enum ExportFormat {
+ AUTO,
DBC,
HTML,
JUPYTER,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportRequest.java
index b20a6cf9a..78a1cf9c6 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ExportRequest.java
@@ -15,17 +15,20 @@ public class ExportRequest {
*
* The value is case sensitive.
*
- * - `SOURCE`: The notebook is exported as source code. - `HTML`: The notebook is exported as
- * an HTML file. - `JUPYTER`: The notebook is exported as a Jupyter/IPython Notebook file. -
- * `DBC`: The notebook is exported in Databricks archive format. - `R_MARKDOWN`: The notebook is
- * exported to R Markdown format.
+ * - `SOURCE`: The notebook is exported as source code. Directory exports will not include
+ * non-notebook entries. - `HTML`: The notebook is exported as an HTML file. - `JUPYTER`: The
+ * notebook is exported as a Jupyter/IPython Notebook file. - `DBC`: The notebook is exported in
+ * Databricks archive format. Directory exports will not include non-notebook entries. -
+ * `R_MARKDOWN`: The notebook is exported to R Markdown format. - `AUTO`: The object or directory
+ * is exported depending on the object's type. Directory exports will include notebooks and
+ * workspace files.
*/
@QueryParam("format")
private ExportFormat format;
/**
* The absolute path of the object or directory. Exporting a directory is only supported for the
- * `DBC` and `SOURCE` format.
+ * `DBC`, `SOURCE`, and `AUTO` formats.
*/
@QueryParam("path")
private String path;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoAccessControlRequest.java
index c306654e7..9a34d3307 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoAccessControlRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoAccessControlRequest.java
@@ -17,7 +17,10 @@ public class RepoAccessControlRequest {
@JsonProperty("permission_level")
private RepoPermissionLevel permissionLevel;
- /** name of the service principal */
+ /**
+ * Application ID of an active service principal. Setting this field requires the
+ * `servicePrincipal/user` role.
+ */
@JsonProperty("service_principal_name")
private String servicePrincipalName;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectAccessControlRequest.java
index 9aaaff0c5..3d6832dd3 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectAccessControlRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectAccessControlRequest.java
@@ -17,7 +17,10 @@ public class WorkspaceObjectAccessControlRequest {
@JsonProperty("permission_level")
private WorkspaceObjectPermissionLevel permissionLevel;
- /** name of the service principal */
+ /**
+ * Application ID of an active service principal. Setting this field requires the
+ * `servicePrincipal/user` role.
+ */
@JsonProperty("service_principal_name")
private String servicePrincipalName;