[get dashboard API]: https://docs.databricks.com/api/workspace/lakeview/get
*/
@JsonProperty("serialized_dashboard")
private String serializedDashboard;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Dashboard.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Dashboard.java
index e5f1c4484..d94cee027 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Dashboard.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Dashboard.java
@@ -40,15 +40,19 @@ public class Dashboard {
private String parentPath;
/**
- * The workspace path of the dashboard asset, including the file name. This field is excluded in
- * List Dashboards responses.
+ * The workspace path of the dashboard asset, including the file name. Exported dashboards always
+ * have the file extension `.lvdash.json`. This field is excluded in List Dashboards responses.
*/
@JsonProperty("path")
private String path;
/**
* The contents of the dashboard in serialized string form. This field is excluded in List
- * Dashboards responses.
+ * Dashboards responses. Use the [get dashboard API] to retrieve an example response, which
+ * includes the `serialized_dashboard` field. This field provides the structure of the JSON string
+ * that represents the dashboard's layout and components.
+ *
+ * [get dashboard API]: https://docs.databricks.com/api/workspace/lakeview/get
*/
@JsonProperty("serialized_dashboard")
private String serializedDashboard;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieMessage.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieMessage.java
index 6737439ea..15c434997 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieMessage.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieMessage.java
@@ -48,8 +48,9 @@ public class GenieMessage {
/**
* MesssageStatus. The possible values are: * `FETCHING_METADATA`: Fetching metadata from the data
- * sources. * `ASKING_AI`: Waiting for the LLM to respond to the users question. *
- * `EXECUTING_QUERY`: Executing AI provided SQL query. Get the SQL query result by calling
+ * sources. * `FILTERING_CONTEXT`: Running smart context step to determine relevant context. *
+ * `ASKING_AI`: Waiting for the LLM to respond to the users question. * `EXECUTING_QUERY`:
+ * Executing AI provided SQL query. Get the SQL query result by calling
* [getMessageQueryResult](:method:genie/getMessageQueryResult) API. **Important: The message
* status will stay in the `EXECUTING_QUERY` until a client calls
* [getMessageQueryResult](:method:genie/getMessageQueryResult)**. * `FAILED`: Generating a
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageErrorType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageErrorType.java
index cfdd2b9f8..5e1c94580 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageErrorType.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageErrorType.java
@@ -31,6 +31,7 @@ public enum MessageErrorType {
LOCAL_CONTEXT_EXCEEDED_EXCEPTION,
MESSAGE_DELETED_WHILE_EXECUTING_EXCEPTION,
MESSAGE_UPDATED_WHILE_EXECUTING_EXCEPTION,
+ NO_QUERY_TO_VISUALIZE_EXCEPTION,
NO_TABLES_TO_QUERY_EXCEPTION,
RATE_LIMIT_EXCEEDED_GENERIC_EXCEPTION,
RATE_LIMIT_EXCEEDED_SPECIFIED_WAIT_EXCEPTION,
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageStatus.java
index 81985d08c..972f44191 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageStatus.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageStatus.java
@@ -6,8 +6,9 @@
/**
* MesssageStatus. The possible values are: * `FETCHING_METADATA`: Fetching metadata from the data
- * sources. * `ASKING_AI`: Waiting for the LLM to respond to the users question. *
- * `EXECUTING_QUERY`: Executing AI provided SQL query. Get the SQL query result by calling
+ * sources. * `FILTERING_CONTEXT`: Running smart context step to determine relevant context. *
+ * `ASKING_AI`: Waiting for the LLM to respond to the users question. * `EXECUTING_QUERY`: Executing
+ * AI provided SQL query. Get the SQL query result by calling
* [getMessageQueryResult](:method:genie/getMessageQueryResult) API. **Important: The message status
* will stay in the `EXECUTING_QUERY` until a client calls
* [getMessageQueryResult](:method:genie/getMessageQueryResult)**. * `FAILED`: Generating a response
@@ -31,6 +32,7 @@ public enum MessageStatus {
FAILED, // Generating a response or the executing the query failed. Please see `error`
// field.
FETCHING_METADATA, // Fetching metadata from the data sources.
+ FILTERING_CONTEXT, // Running smart context step to determine relevant context.
QUERY_RESULT_EXPIRED, // SQL result is not available anymore. The user needs to execute the query
// again.
SUBMITTED, // Message has been submitted.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Result.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Result.java
index d36303462..de3b6451a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Result.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Result.java
@@ -9,6 +9,10 @@
@Generated
public class Result {
+ /** If result is truncated */
+ @JsonProperty("is_truncated")
+ private Boolean isTruncated;
+
/** Row count of the result */
@JsonProperty("row_count")
private Long rowCount;
@@ -20,6 +24,15 @@ public class Result {
@JsonProperty("statement_id")
private String statementId;
+ public Result setIsTruncated(Boolean isTruncated) {
+ this.isTruncated = isTruncated;
+ return this;
+ }
+
+ public Boolean getIsTruncated() {
+ return isTruncated;
+ }
+
public Result setRowCount(Long rowCount) {
this.rowCount = rowCount;
return this;
@@ -43,17 +56,20 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Result that = (Result) o;
- return Objects.equals(rowCount, that.rowCount) && Objects.equals(statementId, that.statementId);
+ return Objects.equals(isTruncated, that.isTruncated)
+ && Objects.equals(rowCount, that.rowCount)
+ && Objects.equals(statementId, that.statementId);
}
@Override
public int hashCode() {
- return Objects.hash(rowCount, statementId);
+ return Objects.hash(isTruncated, rowCount, statementId);
}
@Override
public String toString() {
return new ToStringer(Result.class)
+ .add("isTruncated", isTruncated)
.add("rowCount", rowCount)
.add("statementId", statementId)
.toString();
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateDashboardRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateDashboardRequest.java
index d4a12c274..f9821b02a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateDashboardRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateDashboardRequest.java
@@ -26,7 +26,11 @@ public class UpdateDashboardRequest {
/**
* The contents of the dashboard in serialized string form. This field is excluded in List
- * Dashboards responses.
+ * Dashboards responses. Use the [get dashboard API] to retrieve an example response, which
+ * includes the `serialized_dashboard` field. This field provides the structure of the JSON string
+ * that represents the dashboard's layout and components.
+ *
+ * [get dashboard API]: https://docs.databricks.com/api/workspace/lakeview/get
*/
@JsonProperty("serialized_dashboard")
private String serializedDashboard;
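The expanded `serialized_dashboard` documentation above describes a JSON payload that can be fetched, edited, and pushed back. A minimal sketch of that round trip, assuming the Lakeview accessor `w.lakeview()` and a placeholder dashboard id (illustrative only, not part of this diff):

// Sketch only: fetch a dashboard, inspect its serialized JSON, and push it back.
import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.dashboards.Dashboard;
import com.databricks.sdk.service.dashboards.GetDashboardRequest;
import com.databricks.sdk.service.dashboards.UpdateDashboardRequest;

public class SerializedDashboardRoundTrip {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    // The get dashboard API returns the serialized_dashboard field described above.
    Dashboard d = w.lakeview().get(new GetDashboardRequest().setDashboardId("<dashboard-id>"));
    String layoutJson = d.getSerializedDashboard(); // JSON string: layout and components
    // Push the (possibly edited) JSON back as the new dashboard contents.
    w.lakeview().update(
        new UpdateDashboardRequest()
            .setDashboardId("<dashboard-id>") // placeholder id
            .setSerializedDashboard(layoutJson));
  }
}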
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseJob.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseJob.java
index fa4700fc4..4f335aaa0 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseJob.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseJob.java
@@ -22,6 +22,15 @@ public class BaseJob {
@JsonProperty("creator_user_name")
private String creatorUserName;
+ /**
+ * The id of the budget policy used by this job for cost attribution purposes. This may be set
+ * through (in order of precedence): 1. Budget admins through the account or workspace console 2.
+ * Jobs UI in the job details page and Jobs API using `budget_policy_id` 3. Inferred default based
+ * on accessible budget policies of the run_as identity on job creation or modification.
+ */
+ @JsonProperty("effective_budget_policy_id")
+ private String effectiveBudgetPolicyId;
+
/** The canonical identifier for this job. */
@JsonProperty("job_id")
private Long jobId;
@@ -51,6 +60,15 @@ public String getCreatorUserName() {
return creatorUserName;
}
+ public BaseJob setEffectiveBudgetPolicyId(String effectiveBudgetPolicyId) {
+ this.effectiveBudgetPolicyId = effectiveBudgetPolicyId;
+ return this;
+ }
+
+ public String getEffectiveBudgetPolicyId() {
+ return effectiveBudgetPolicyId;
+ }
+
public BaseJob setJobId(Long jobId) {
this.jobId = jobId;
return this;
@@ -76,13 +94,14 @@ public boolean equals(Object o) {
BaseJob that = (BaseJob) o;
return Objects.equals(createdTime, that.createdTime)
&& Objects.equals(creatorUserName, that.creatorUserName)
+ && Objects.equals(effectiveBudgetPolicyId, that.effectiveBudgetPolicyId)
&& Objects.equals(jobId, that.jobId)
&& Objects.equals(settings, that.settings);
}
@Override
public int hashCode() {
- return Objects.hash(createdTime, creatorUserName, jobId, settings);
+ return Objects.hash(createdTime, creatorUserName, effectiveBudgetPolicyId, jobId, settings);
}
@Override
@@ -90,6 +109,7 @@ public String toString() {
return new ToStringer(BaseJob.class)
.add("createdTime", createdTime)
.add("creatorUserName", creatorUserName)
+ .add("effectiveBudgetPolicyId", effectiveBudgetPolicyId)
.add("jobId", jobId)
.add("settings", settings)
.toString();
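Since `effective_budget_policy_id` is resolved server-side (budget admin setting, explicit `budget_policy_id`, or an inferred default), client code only reads it. A minimal sketch, assuming the usual `WorkspaceClient` wiring and that `jobs().list` yields `BaseJob` items:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.jobs.BaseJob;
import com.databricks.sdk.service.jobs.ListJobsRequest;

public class EffectiveBudgetPolicyExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    for (BaseJob job : w.jobs().list(new ListJobsRequest())) {
      // The effective policy reflects whichever source won by the precedence rules above.
      System.out.printf("job %d -> budget policy %s%n",
          job.getJobId(), job.getEffectiveBudgetPolicyId());
    }
  }
}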
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java
index 040a695d3..e69adebee 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java
@@ -15,6 +15,14 @@ public class CreateJob {
@JsonProperty("access_control_list")
private Collection<JobAccessControlRequest> accessControlList;
* [Link]:
* https://kb.databricks.com/en_US/notebooks/too-many-execution-contexts-are-open-right-now
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java
index cd5180a13..e51d9ff19 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java
@@ -95,6 +95,13 @@ public class CreatePipeline {
@JsonProperty("photon")
private Boolean photon;
+ /**
+ * The default schema (database) where tables are read from or published to. The presence of this
+ * field implies that the pipeline is in direct publishing mode.
+ */
+ @JsonProperty("schema")
+ private String schema;
+
/** Whether serverless compute is enabled for this pipeline. */
@JsonProperty("serverless")
private Boolean serverless;
@@ -286,6 +293,15 @@ public Boolean getPhoton() {
return photon;
}
+ public CreatePipeline setSchema(String schema) {
+ this.schema = schema;
+ return this;
+ }
+
+ public String getSchema() {
+ return schema;
+ }
+
public CreatePipeline setServerless(Boolean serverless) {
this.serverless = serverless;
return this;
@@ -346,6 +362,7 @@ public boolean equals(Object o) {
&& Objects.equals(name, that.name)
&& Objects.equals(notifications, that.notifications)
&& Objects.equals(photon, that.photon)
+ && Objects.equals(schema, that.schema)
&& Objects.equals(serverless, that.serverless)
&& Objects.equals(storage, that.storage)
&& Objects.equals(target, that.target)
@@ -374,6 +391,7 @@ public int hashCode() {
name,
notifications,
photon,
+ schema,
serverless,
storage,
target,
@@ -402,6 +420,7 @@ public String toString() {
.add("name", name)
.add("notifications", notifications)
.add("photon", photon)
+ .add("schema", schema)
.add("serverless", serverless)
.add("storage", storage)
.add("target", target)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java
index 0ea12b587..52cbcee7c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java
@@ -104,6 +104,13 @@ public class EditPipeline {
@JsonProperty("pipeline_id")
private String pipelineId;
+ /**
+ * The default schema (database) where tables are read from or published to. The presence of this
+ * field implies that the pipeline is in direct publishing mode.
+ */
+ @JsonProperty("schema")
+ private String schema;
+
/** Whether serverless compute is enabled for this pipeline. */
@JsonProperty("serverless")
private Boolean serverless;
@@ -304,6 +311,15 @@ public String getPipelineId() {
return pipelineId;
}
+ public EditPipeline setSchema(String schema) {
+ this.schema = schema;
+ return this;
+ }
+
+ public String getSchema() {
+ return schema;
+ }
+
public EditPipeline setServerless(Boolean serverless) {
this.serverless = serverless;
return this;
@@ -365,6 +381,7 @@ public boolean equals(Object o) {
&& Objects.equals(notifications, that.notifications)
&& Objects.equals(photon, that.photon)
&& Objects.equals(pipelineId, that.pipelineId)
+ && Objects.equals(schema, that.schema)
&& Objects.equals(serverless, that.serverless)
&& Objects.equals(storage, that.storage)
&& Objects.equals(target, that.target)
@@ -394,6 +411,7 @@ public int hashCode() {
notifications,
photon,
pipelineId,
+ schema,
serverless,
storage,
target,
@@ -423,6 +441,7 @@ public String toString() {
.add("notifications", notifications)
.add("photon", photon)
.add("pipelineId", pipelineId)
+ .add("schema", schema)
.add("serverless", serverless)
.add("storage", storage)
.add("target", target)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionConfig.java
index 82ef6c4a7..92f853aed 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionConfig.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionConfig.java
@@ -9,6 +9,10 @@
@Generated
public class IngestionConfig {
+ /** Select tables from a specific source report. */
+ @JsonProperty("report")
+ private ReportSpec report;
+
/** Select tables from a specific source schema. */
@JsonProperty("schema")
private SchemaSpec schema;
@@ -17,6 +21,15 @@ public class IngestionConfig {
@JsonProperty("table")
private TableSpec table;
+ public IngestionConfig setReport(ReportSpec report) {
+ this.report = report;
+ return this;
+ }
+
+ public ReportSpec getReport() {
+ return report;
+ }
+
public IngestionConfig setSchema(SchemaSpec schema) {
this.schema = schema;
return this;
@@ -40,17 +53,20 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
IngestionConfig that = (IngestionConfig) o;
- return Objects.equals(schema, that.schema) && Objects.equals(table, that.table);
+ return Objects.equals(report, that.report)
+ && Objects.equals(schema, that.schema)
+ && Objects.equals(table, that.table);
}
@Override
public int hashCode() {
- return Objects.hash(schema, table);
+ return Objects.hash(report, schema, table);
}
@Override
public String toString() {
return new ToStringer(IngestionConfig.class)
+ .add("report", report)
.add("schema", schema)
.add("table", table)
.toString();
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineSpec.java
index 8561d2586..c880ee65e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineSpec.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineSpec.java
@@ -87,6 +87,13 @@ public class PipelineSpec {
@JsonProperty("photon")
private Boolean photon;
+ /**
+ * The default schema (database) where tables are read from or published to. The presence of this
+ * field implies that the pipeline is in direct publishing mode.
+ */
+ @JsonProperty("schema")
+ private String schema;
+
/** Whether serverless compute is enabled for this pipeline. */
@JsonProperty("serverless")
private Boolean serverless;
@@ -260,6 +267,15 @@ public Boolean getPhoton() {
return photon;
}
+ public PipelineSpec setSchema(String schema) {
+ this.schema = schema;
+ return this;
+ }
+
+ public String getSchema() {
+ return schema;
+ }
+
public PipelineSpec setServerless(Boolean serverless) {
this.serverless = serverless;
return this;
@@ -318,6 +334,7 @@ public boolean equals(Object o) {
&& Objects.equals(name, that.name)
&& Objects.equals(notifications, that.notifications)
&& Objects.equals(photon, that.photon)
+ && Objects.equals(schema, that.schema)
&& Objects.equals(serverless, that.serverless)
&& Objects.equals(storage, that.storage)
&& Objects.equals(target, that.target)
@@ -344,6 +361,7 @@ public int hashCode() {
name,
notifications,
photon,
+ schema,
serverless,
storage,
target,
@@ -370,6 +388,7 @@ public String toString() {
.add("name", name)
.add("notifications", notifications)
.add("photon", photon)
+ .add("schema", schema)
.add("serverless", serverless)
.add("storage", storage)
.add("target", target)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ReportSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ReportSpec.java
new file mode 100755
index 000000000..b737fbd9a
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ReportSpec.java
@@ -0,0 +1,110 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.pipelines;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class ReportSpec {
+ /** Required. Destination catalog to store table. */
+ @JsonProperty("destination_catalog")
+ private String destinationCatalog;
+
+ /** Required. Destination schema to store table. */
+ @JsonProperty("destination_schema")
+ private String destinationSchema;
+
+ /**
+ * Required. Destination table name. The pipeline fails if a table with that name already exists.
+ */
+ @JsonProperty("destination_table")
+ private String destinationTable;
+
+ /** Required. Report URL in the source system. */
+ @JsonProperty("source_url")
+ private String sourceUrl;
+
+ /**
+ * Configuration settings to control the ingestion of tables. These settings override the
+ * table_configuration defined in the IngestionPipelineDefinition object.
+ */
+ @JsonProperty("table_configuration")
+ private TableSpecificConfig tableConfiguration;
+
+ public ReportSpec setDestinationCatalog(String destinationCatalog) {
+ this.destinationCatalog = destinationCatalog;
+ return this;
+ }
+
+ public String getDestinationCatalog() {
+ return destinationCatalog;
+ }
+
+ public ReportSpec setDestinationSchema(String destinationSchema) {
+ this.destinationSchema = destinationSchema;
+ return this;
+ }
+
+ public String getDestinationSchema() {
+ return destinationSchema;
+ }
+
+ public ReportSpec setDestinationTable(String destinationTable) {
+ this.destinationTable = destinationTable;
+ return this;
+ }
+
+ public String getDestinationTable() {
+ return destinationTable;
+ }
+
+ public ReportSpec setSourceUrl(String sourceUrl) {
+ this.sourceUrl = sourceUrl;
+ return this;
+ }
+
+ public String getSourceUrl() {
+ return sourceUrl;
+ }
+
+ public ReportSpec setTableConfiguration(TableSpecificConfig tableConfiguration) {
+ this.tableConfiguration = tableConfiguration;
+ return this;
+ }
+
+ public TableSpecificConfig getTableConfiguration() {
+ return tableConfiguration;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ReportSpec that = (ReportSpec) o;
+ return Objects.equals(destinationCatalog, that.destinationCatalog)
+ && Objects.equals(destinationSchema, that.destinationSchema)
+ && Objects.equals(destinationTable, that.destinationTable)
+ && Objects.equals(sourceUrl, that.sourceUrl)
+ && Objects.equals(tableConfiguration, that.tableConfiguration);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ destinationCatalog, destinationSchema, destinationTable, sourceUrl, tableConfiguration);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ReportSpec.class)
+ .add("destinationCatalog", destinationCatalog)
+ .add("destinationSchema", destinationSchema)
+ .add("destinationTable", destinationTable)
+ .add("sourceUrl", sourceUrl)
+ .add("tableConfiguration", tableConfiguration)
+ .toString();
+ }
+}
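ReportSpec plugs into the existing `IngestionConfig` via the new `report` selector added above. A small sketch using only the setters shown in this diff; the source URL and destination names are placeholders, and in practice the config would be nested inside an ingestion pipeline definition:

import com.databricks.sdk.service.pipelines.IngestionConfig;
import com.databricks.sdk.service.pipelines.ReportSpec;

public class ReportIngestionExample {
  public static void main(String[] args) {
    // Select a single source report and land it in a destination table.
    IngestionConfig config =
        new IngestionConfig()
            .setReport(
                new ReportSpec()
                    .setSourceUrl("https://example.com/reports/sales") // placeholder source URL
                    .setDestinationCatalog("main")                     // placeholder catalog
                    .setDestinationSchema("ingest")                    // placeholder schema
                    .setDestinationTable("sales_report"));             // placeholder table
    System.out.println(config);
  }
}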
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpec.java
index b7cf1ccb5..619922530 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpec.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpec.java
@@ -18,7 +18,7 @@ public class TableSpec {
private String destinationSchema;
/**
- * Optional. Destination table name. The pipeline fails If a table with that name already exists.
+ * Optional. Destination table name. The pipeline fails if a table with that name already exists.
* If not set, the source table name is used.
*/
@JsonProperty("destination_table")
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpecificConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpecificConfig.java
index ffec3bdeb..e3dff8b3f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpecificConfig.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpecificConfig.java
@@ -25,6 +25,13 @@ public class TableSpecificConfig {
@JsonProperty("scd_type")
private TableSpecificConfigScdType scdType;
+ /**
+ * The column names specifying the logical order of events in the source data. Delta Live Tables
+ * uses this sequencing to handle change events that arrive out of order.
+ */
+ @JsonProperty("sequence_by")
+ private Collection<String> sequenceBy;
+ * Deletes the disable legacy DBFS setting for a workspace, reverting back to the default.
+ */
+ public DeleteDisableLegacyDbfsResponse delete(DeleteDisableLegacyDbfsRequest request) {
+ return impl.delete(request);
+ }
+
+ /**
+ * Get the disable legacy DBFS setting.
+ *
+ * Gets the disable legacy DBFS setting.
+ */
+ public DisableLegacyDbfs get(GetDisableLegacyDbfsRequest request) {
+ return impl.get(request);
+ }
+
+ public DisableLegacyDbfs update(
+ boolean allowMissing, DisableLegacyDbfs setting, String fieldMask) {
+ return update(
+ new UpdateDisableLegacyDbfsRequest()
+ .setAllowMissing(allowMissing)
+ .setSetting(setting)
+ .setFieldMask(fieldMask));
+ }
+
+ /**
+ * Update the disable legacy DBFS setting.
+ *
+ * Updates the disable legacy DBFS setting for the workspace.
+ */
+ public DisableLegacyDbfs update(UpdateDisableLegacyDbfsRequest request) {
+ return impl.update(request);
+ }
+
+ public DisableLegacyDbfsService impl() {
+ return impl;
+ }
+}
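A minimal sketch of reading the new workspace setting through the `DisableLegacyDbfs()` accessor added to `SettingsAPI` later in this diff; the fields of the `DisableLegacyDbfs` payload itself are not shown here, so the sketch only prints it:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.settings.DisableLegacyDbfs;
import com.databricks.sdk.service.settings.GetDisableLegacyDbfsRequest;

public class DisableLegacyDbfsExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    // Read the current workspace-level setting through the new accessor on SettingsAPI.
    DisableLegacyDbfs current =
        w.settings().DisableLegacyDbfs().get(new GetDisableLegacyDbfsRequest());
    System.out.println(current);
  }
}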
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyDbfsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyDbfsImpl.java
new file mode 100755
index 000000000..856c1d61d
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyDbfsImpl.java
@@ -0,0 +1,42 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.support.Generated;
+import java.util.HashMap;
+import java.util.Map;
+
+/** Package-local implementation of DisableLegacyDbfs */
+@Generated
+class DisableLegacyDbfsImpl implements DisableLegacyDbfsService {
+ private final ApiClient apiClient;
+
+ public DisableLegacyDbfsImpl(ApiClient apiClient) {
+ this.apiClient = apiClient;
+ }
+
+ @Override
+ public DeleteDisableLegacyDbfsResponse delete(DeleteDisableLegacyDbfsRequest request) {
+ String path = "/api/2.0/settings/types/disable_legacy_dbfs/names/default";
+ Map<String, String> headers = new HashMap<>();
+ * This is the high-level interface, that contains generated methods.
+ *
+ * Evolving: this interface is under development. Method signatures may change.
+ */
+@Generated
+public interface DisableLegacyDbfsService {
+ /**
+ * Delete the disable legacy DBFS setting.
+ *
+ * Deletes the disable legacy DBFS setting for a workspace, reverting back to the default.
+ */
+ DeleteDisableLegacyDbfsResponse delete(
+ DeleteDisableLegacyDbfsRequest deleteDisableLegacyDbfsRequest);
+
+ /**
+ * Get the disable legacy DBFS setting.
+ *
+ * Gets the disable legacy DBFS setting.
+ */
+ DisableLegacyDbfs get(GetDisableLegacyDbfsRequest getDisableLegacyDbfsRequest);
+
+ /**
+ * Update the disable legacy DBFS setting.
+ *
+ * Updates the disable legacy DBFS setting for the workspace.
+ */
+ DisableLegacyDbfs update(UpdateDisableLegacyDbfsRequest updateDisableLegacyDbfsRequest);
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDisableLegacyDbfsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDisableLegacyDbfsRequest.java
new file mode 100755
index 000000000..d3f3545f9
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDisableLegacyDbfsRequest.java
@@ -0,0 +1,52 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+/** Get the disable legacy DBFS setting */
+@Generated
+public class GetDisableLegacyDbfsRequest {
+ /**
+ * etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+ * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+ * overwriting each other. It is strongly suggested that systems make use of the etag in the read
+ * -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get
+ * an etag from a GET request, and pass it with the DELETE request to identify the rule set
+ * version you are deleting.
+ */
+ @JsonIgnore
+ @QueryParam("etag")
+ private String etag;
+
+ public GetDisableLegacyDbfsRequest setEtag(String etag) {
+ this.etag = etag;
+ return this;
+ }
+
+ public String getEtag() {
+ return etag;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetDisableLegacyDbfsRequest that = (GetDisableLegacyDbfsRequest) o;
+ return Objects.equals(etag, that.etag);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(etag);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetDisableLegacyDbfsRequest.class).add("etag", etag).toString();
+ }
+}
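The etag javadoc above describes a read-then-delete pattern for optimistic concurrency. A sketch of that pattern, assuming (not shown in this diff) that `DisableLegacyDbfs` exposes `getEtag()` and `DeleteDisableLegacyDbfsRequest` exposes `setEtag(String)`:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.settings.DeleteDisableLegacyDbfsRequest;
import com.databricks.sdk.service.settings.DisableLegacyDbfs;
import com.databricks.sdk.service.settings.GetDisableLegacyDbfsRequest;

public class EtagReadThenDeleteExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    // 1. GET the setting to obtain a fresh etag.
    DisableLegacyDbfs current =
        w.settings().DisableLegacyDbfs().get(new GetDisableLegacyDbfsRequest());
    // 2. Pass that etag on DELETE so a concurrent write surfaces as a conflict, not a lost update.
    w.settings().DisableLegacyDbfs().delete(
        new DeleteDisableLegacyDbfsRequest().setEtag(current.getEtag())); // assumed accessors
  }
}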
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java
index 1fde63913..e7f1c92a4 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java
@@ -21,6 +21,8 @@ public class SettingsAPI {
private DisableLegacyAccessAPI disableLegacyAccessAPI;
+ private DisableLegacyDbfsAPI disableLegacyDbfsAPI;
+
private EnhancedSecurityMonitoringAPI enhancedSecurityMonitoringAPI;
private RestrictWorkspaceAdminsAPI restrictWorkspaceAdminsAPI;
@@ -37,6 +39,8 @@ public SettingsAPI(ApiClient apiClient) {
disableLegacyAccessAPI = new DisableLegacyAccessAPI(apiClient);
+ disableLegacyDbfsAPI = new DisableLegacyDbfsAPI(apiClient);
+
enhancedSecurityMonitoringAPI = new EnhancedSecurityMonitoringAPI(apiClient);
restrictWorkspaceAdminsAPI = new RestrictWorkspaceAdminsAPI(apiClient);
@@ -70,6 +74,14 @@ public DisableLegacyAccessAPI DisableLegacyAccess() {
return disableLegacyAccessAPI;
}
+ /**
+ * When this setting is on, access to DBFS root and DBFS mounts is disallowed (as well as creation
+ * of new mounts).
+ */
+ public DisableLegacyDbfsAPI DisableLegacyDbfs() {
+ return disableLegacyDbfsAPI;
+ }
+
/** Controls whether enhanced security monitoring is enabled for the current workspace. */
public EnhancedSecurityMonitoringAPI EnhancedSecurityMonitoring() {
return enhancedSecurityMonitoringAPI;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyDbfsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyDbfsRequest.java
new file mode 100755
index 000000000..6c657d6b3
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyDbfsRequest.java
@@ -0,0 +1,79 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** Details required to update a setting. */
+@Generated
+public class UpdateDisableLegacyDbfsRequest {
+ /** This should always be set to true for Settings API. Added for AIP compliance. */
+ @JsonProperty("allow_missing")
+ private Boolean allowMissing;
+
+ /**
+ * Field mask is required to be passed into the PATCH request. Field mask specifies which fields
+ * of the setting payload will be updated. The field mask needs to be supplied as single string.
+ * To specify multiple fields in the field mask, use comma as the separator (no space).
+ */
+ @JsonProperty("field_mask")
+ private String fieldMask;
+
+ /** */
+ @JsonProperty("setting")
+ private DisableLegacyDbfs setting;
+
+ public UpdateDisableLegacyDbfsRequest setAllowMissing(Boolean allowMissing) {
+ this.allowMissing = allowMissing;
+ return this;
+ }
+
+ public Boolean getAllowMissing() {
+ return allowMissing;
+ }
+
+ public UpdateDisableLegacyDbfsRequest setFieldMask(String fieldMask) {
+ this.fieldMask = fieldMask;
+ return this;
+ }
+
+ public String getFieldMask() {
+ return fieldMask;
+ }
+
+ public UpdateDisableLegacyDbfsRequest setSetting(DisableLegacyDbfs setting) {
+ this.setting = setting;
+ return this;
+ }
+
+ public DisableLegacyDbfs getSetting() {
+ return setting;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdateDisableLegacyDbfsRequest that = (UpdateDisableLegacyDbfsRequest) o;
+ return Objects.equals(allowMissing, that.allowMissing)
+ && Objects.equals(fieldMask, that.fieldMask)
+ && Objects.equals(setting, that.setting);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(allowMissing, fieldMask, setting);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateDisableLegacyDbfsRequest.class)
+ .add("allowMissing", allowMissing)
+ .add("fieldMask", fieldMask)
+ .add("setting", setting)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Alert.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Alert.java
index 7fa890a48..7f916cf5f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Alert.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Alert.java
@@ -46,6 +46,10 @@ public class Alert {
@JsonProperty("lifecycle_state")
private LifecycleState lifecycleState;
+ /** Whether to notify alert subscribers when alert returns back to normal. */
+ @JsonProperty("notify_on_ok")
+ private Boolean notifyOnOk;
+
/** The owner's username. This field is set to "Unavailable" if the user has been deleted. */
@JsonProperty("owner_user_name")
private String ownerUserName;
@@ -143,6 +147,15 @@ public LifecycleState getLifecycleState() {
return lifecycleState;
}
+ public Alert setNotifyOnOk(Boolean notifyOnOk) {
+ this.notifyOnOk = notifyOnOk;
+ return this;
+ }
+
+ public Boolean getNotifyOnOk() {
+ return notifyOnOk;
+ }
+
public Alert setOwnerUserName(String ownerUserName) {
this.ownerUserName = ownerUserName;
return this;
@@ -218,6 +231,7 @@ public boolean equals(Object o) {
&& Objects.equals(displayName, that.displayName)
&& Objects.equals(id, that.id)
&& Objects.equals(lifecycleState, that.lifecycleState)
+ && Objects.equals(notifyOnOk, that.notifyOnOk)
&& Objects.equals(ownerUserName, that.ownerUserName)
&& Objects.equals(parentPath, that.parentPath)
&& Objects.equals(queryId, that.queryId)
@@ -237,6 +251,7 @@ public int hashCode() {
displayName,
id,
lifecycleState,
+ notifyOnOk,
ownerUserName,
parentPath,
queryId,
@@ -256,6 +271,7 @@ public String toString() {
.add("displayName", displayName)
.add("id", id)
.add("lifecycleState", lifecycleState)
+ .add("notifyOnOk", notifyOnOk)
.add("ownerUserName", ownerUserName)
.add("parentPath", parentPath)
.add("queryId", queryId)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Channel.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Channel.java
index 5abee9767..4ed901e2a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Channel.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Channel.java
@@ -7,6 +7,10 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
+/**
+ * Configures the channel name and DBSQL version of the warehouse. CHANNEL_NAME_CUSTOM should be
+ * chosen only when `dbsql_version` is specified.
+ */
@Generated
public class Channel {
/** */
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ChannelName.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ChannelName.java
index 3b9d4cbb0..82eb48e5c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ChannelName.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ChannelName.java
@@ -9,6 +9,5 @@ public enum ChannelName {
CHANNEL_NAME_CURRENT,
CHANNEL_NAME_CUSTOM,
CHANNEL_NAME_PREVIEW,
- CHANNEL_NAME_PREVIOUS,
CHANNEL_NAME_UNSPECIFIED,
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlertRequestAlert.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlertRequestAlert.java
index aa0508b0e..80af13302 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlertRequestAlert.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlertRequestAlert.java
@@ -34,6 +34,10 @@ public class CreateAlertRequestAlert {
@JsonProperty("display_name")
private String displayName;
+ /** Whether to notify alert subscribers when alert returns back to normal. */
+ @JsonProperty("notify_on_ok")
+ private Boolean notifyOnOk;
+
/** The workspace path of the folder containing the alert. */
@JsonProperty("parent_path")
private String parentPath;
@@ -85,6 +89,15 @@ public String getDisplayName() {
return displayName;
}
+ public CreateAlertRequestAlert setNotifyOnOk(Boolean notifyOnOk) {
+ this.notifyOnOk = notifyOnOk;
+ return this;
+ }
+
+ public Boolean getNotifyOnOk() {
+ return notifyOnOk;
+ }
+
public CreateAlertRequestAlert setParentPath(String parentPath) {
this.parentPath = parentPath;
return this;
@@ -121,6 +134,7 @@ public boolean equals(Object o) {
&& Objects.equals(customBody, that.customBody)
&& Objects.equals(customSubject, that.customSubject)
&& Objects.equals(displayName, that.displayName)
+ && Objects.equals(notifyOnOk, that.notifyOnOk)
&& Objects.equals(parentPath, that.parentPath)
&& Objects.equals(queryId, that.queryId)
&& Objects.equals(secondsToRetrigger, that.secondsToRetrigger);
@@ -129,7 +143,14 @@ public boolean equals(Object o) {
@Override
public int hashCode() {
return Objects.hash(
- condition, customBody, customSubject, displayName, parentPath, queryId, secondsToRetrigger);
+ condition,
+ customBody,
+ customSubject,
+ displayName,
+ notifyOnOk,
+ parentPath,
+ queryId,
+ secondsToRetrigger);
}
@Override
@@ -139,6 +160,7 @@ public String toString() {
.add("customBody", customBody)
.add("customSubject", customSubject)
.add("displayName", displayName)
+ .add("notifyOnOk", notifyOnOk)
.add("parentPath", parentPath)
.add("queryId", queryId)
.add("secondsToRetrigger", secondsToRetrigger)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateWarehouseRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateWarehouseRequest.java
index c976b5779..af89d90bb 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateWarehouseRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateWarehouseRequest.java
@@ -13,7 +13,8 @@ public class CreateWarehouseRequest {
* The amount of time in minutes that a SQL warehouse must be idle (i.e., no RUNNING queries)
* before it is automatically stopped.
*
- * Supported values: - Must be == 0 or >= 10 mins - 0 indicates no autostop.
+ * Supported values: - Must be >= 0 mins for serverless warehouses - Must be == 0 or >= 10 mins
+ * for non-serverless warehouses - 0 indicates no autostop.
*
* Defaults to 120 mins
*/
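The relaxed `auto_stop_mins` rule above applies only to serverless warehouses; non-serverless warehouses still require 0 or at least 10 minutes. A minimal sketch, assuming `w.warehouses().create(...)` and placeholder sizing:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.sql.CreateWarehouseRequest;

public class ServerlessAutostopExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    // With serverless enabled, an autostop value below 10 minutes is now within the supported range.
    w.warehouses().create(
        new CreateWarehouseRequest()
            .setName("serverless-demo") // placeholder name
            .setClusterSize("2X-Small") // placeholder size
            .setEnableServerlessCompute(true)
            .setAutoStopMins(5L));
  }
}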
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsResponseAlert.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsResponseAlert.java
index bcca330c1..9489f808c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsResponseAlert.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ListAlertsResponseAlert.java
@@ -46,6 +46,10 @@ public class ListAlertsResponseAlert {
@JsonProperty("lifecycle_state")
private LifecycleState lifecycleState;
+ /** Whether to notify alert subscribers when alert returns back to normal. */
+ @JsonProperty("notify_on_ok")
+ private Boolean notifyOnOk;
+
/** The owner's username. This field is set to "Unavailable" if the user has been deleted. */
@JsonProperty("owner_user_name")
private String ownerUserName;
@@ -139,6 +143,15 @@ public LifecycleState getLifecycleState() {
return lifecycleState;
}
+ public ListAlertsResponseAlert setNotifyOnOk(Boolean notifyOnOk) {
+ this.notifyOnOk = notifyOnOk;
+ return this;
+ }
+
+ public Boolean getNotifyOnOk() {
+ return notifyOnOk;
+ }
+
public ListAlertsResponseAlert setOwnerUserName(String ownerUserName) {
this.ownerUserName = ownerUserName;
return this;
@@ -205,6 +218,7 @@ public boolean equals(Object o) {
&& Objects.equals(displayName, that.displayName)
&& Objects.equals(id, that.id)
&& Objects.equals(lifecycleState, that.lifecycleState)
+ && Objects.equals(notifyOnOk, that.notifyOnOk)
&& Objects.equals(ownerUserName, that.ownerUserName)
&& Objects.equals(queryId, that.queryId)
&& Objects.equals(secondsToRetrigger, that.secondsToRetrigger)
@@ -223,6 +237,7 @@ public int hashCode() {
displayName,
id,
lifecycleState,
+ notifyOnOk,
ownerUserName,
queryId,
secondsToRetrigger,
@@ -241,6 +256,7 @@ public String toString() {
.add("displayName", displayName)
.add("id", id)
.add("lifecycleState", lifecycleState)
+ .add("notifyOnOk", notifyOnOk)
.add("ownerUserName", ownerUserName)
.add("queryId", queryId)
.add("secondsToRetrigger", secondsToRetrigger)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateAlertRequestAlert.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateAlertRequestAlert.java
index 1f5502d4d..cae7b393b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateAlertRequestAlert.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/UpdateAlertRequestAlert.java
@@ -34,6 +34,10 @@ public class UpdateAlertRequestAlert {
@JsonProperty("display_name")
private String displayName;
+ /** Whether to notify alert subscribers when alert returns back to normal. */
+ @JsonProperty("notify_on_ok")
+ private Boolean notifyOnOk;
+
/** The owner's username. This field is set to "Unavailable" if the user has been deleted. */
@JsonProperty("owner_user_name")
private String ownerUserName;
@@ -85,6 +89,15 @@ public String getDisplayName() {
return displayName;
}
+ public UpdateAlertRequestAlert setNotifyOnOk(Boolean notifyOnOk) {
+ this.notifyOnOk = notifyOnOk;
+ return this;
+ }
+
+ public Boolean getNotifyOnOk() {
+ return notifyOnOk;
+ }
+
public UpdateAlertRequestAlert setOwnerUserName(String ownerUserName) {
this.ownerUserName = ownerUserName;
return this;
@@ -121,6 +134,7 @@ public boolean equals(Object o) {
&& Objects.equals(customBody, that.customBody)
&& Objects.equals(customSubject, that.customSubject)
&& Objects.equals(displayName, that.displayName)
+ && Objects.equals(notifyOnOk, that.notifyOnOk)
&& Objects.equals(ownerUserName, that.ownerUserName)
&& Objects.equals(queryId, that.queryId)
&& Objects.equals(secondsToRetrigger, that.secondsToRetrigger);
@@ -133,6 +147,7 @@ public int hashCode() {
customBody,
customSubject,
displayName,
+ notifyOnOk,
ownerUserName,
queryId,
secondsToRetrigger);
@@ -145,6 +160,7 @@ public String toString() {
.add("customBody", customBody)
.add("customSubject", customSubject)
.add("displayName", displayName)
+ .add("notifyOnOk", notifyOnOk)
.add("ownerUserName", ownerUserName)
.add("queryId", queryId)
.add("secondsToRetrigger", secondsToRetrigger)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCredentials.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCredentialsRequest.java
similarity index 80%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCredentials.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCredentialsRequest.java
index b30d35e8f..fb02f7bf7 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCredentials.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCredentialsRequest.java
@@ -8,11 +8,11 @@
import java.util.Objects;
@Generated
-public class CreateCredentials {
+public class CreateCredentialsRequest {
/**
- * Git provider. This field is case-insensitive. The available Git providers are gitHub,
- * bitbucketCloud, gitLab, azureDevOpsServices, gitHubEnterprise, bitbucketServer,
- * gitLabEnterpriseEdition and awsCodeCommit.
+ * Git provider. This field is case-insensitive. The available Git providers are `gitHub`,
+ * `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`,
+ * `gitLabEnterpriseEdition` and `awsCodeCommit`.
*/
@JsonProperty("git_provider")
private String gitProvider;
@@ -30,15 +30,14 @@ public class CreateCredentials {
/**
* The personal access token used to authenticate to the corresponding Git provider. For certain
- * providers, support may exist for other types of scoped access tokens. [Learn more]. The
- * personal access token used to authenticate to the corresponding Git
+ * providers, support may exist for other types of scoped access tokens. [Learn more].
*
* [Learn more]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html
*/
@JsonProperty("personal_access_token")
private String personalAccessToken;
- public CreateCredentials setGitProvider(String gitProvider) {
+ public CreateCredentialsRequest setGitProvider(String gitProvider) {
this.gitProvider = gitProvider;
return this;
}
@@ -47,7 +46,7 @@ public String getGitProvider() {
return gitProvider;
}
- public CreateCredentials setGitUsername(String gitUsername) {
+ public CreateCredentialsRequest setGitUsername(String gitUsername) {
this.gitUsername = gitUsername;
return this;
}
@@ -56,7 +55,7 @@ public String getGitUsername() {
return gitUsername;
}
- public CreateCredentials setPersonalAccessToken(String personalAccessToken) {
+ public CreateCredentialsRequest setPersonalAccessToken(String personalAccessToken) {
this.personalAccessToken = personalAccessToken;
return this;
}
@@ -69,7 +68,7 @@ public String getPersonalAccessToken() {
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
- CreateCredentials that = (CreateCredentials) o;
+ CreateCredentialsRequest that = (CreateCredentialsRequest) o;
return Objects.equals(gitProvider, that.gitProvider)
&& Objects.equals(gitUsername, that.gitUsername)
&& Objects.equals(personalAccessToken, that.personalAccessToken);
@@ -82,7 +81,7 @@ public int hashCode() {
@Override
public String toString() {
- return new ToStringer(CreateCredentials.class)
+ return new ToStringer(CreateCredentialsRequest.class)
.add("gitProvider", gitProvider)
.add("gitUsername", gitUsername)
.add("personalAccessToken", personalAccessToken)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCredentialsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCredentialsResponse.java
index 5f693ebf0..d50aae0cb 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCredentialsResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCredentialsResponse.java
@@ -13,21 +13,13 @@ public class CreateCredentialsResponse {
@JsonProperty("credential_id")
private Long credentialId;
- /**
- * Git provider. This field is case-insensitive. The available Git providers are gitHub,
- * bitbucketCloud, gitLab, azureDevOpsServices, gitHubEnterprise, bitbucketServer,
- * gitLabEnterpriseEdition and awsCodeCommit.
- */
+ /** The Git provider associated with the credential. */
@JsonProperty("git_provider")
private String gitProvider;
/**
- * The username or email provided with your Git provider account, depending on which provider you
- * are using. For GitHub, GitHub Enterprise Server, or Azure DevOps Services, either email or
- * username may be used. For GitLab, GitLab Enterprise Edition, email must be used. For AWS
- * CodeCommit, BitBucket or BitBucket Server, username must be used. For all other providers
- * please see your provider's Personal Access Token authentication documentation to see what is
- * supported.
+ * The username or email provided with your Git provider account and associated with the
+ * credential.
*/
@JsonProperty("git_username")
private String gitUsername;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateRepo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateRepoRequest.java
similarity index 76%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateRepo.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateRepoRequest.java
index 6d4a0863c..84cd6aa77 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateRepo.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateRepoRequest.java
@@ -8,18 +8,18 @@
import java.util.Objects;
@Generated
-public class CreateRepo {
+public class CreateRepoRequest {
/**
* Desired path for the repo in the workspace. Almost any path in the workspace can be chosen. If
- * repo is created in /Repos, path must be in the format /Repos/{folder}/{repo-name}.
+ * repo is created in `/Repos`, path must be in the format `/Repos/{folder}/{repo-name}`.
*/
@JsonProperty("path")
private String path;
/**
- * Git provider. This field is case-insensitive. The available Git providers are gitHub,
- * bitbucketCloud, gitLab, azureDevOpsServices, gitHubEnterprise, bitbucketServer,
- * gitLabEnterpriseEdition and awsCodeCommit.
+ * Git provider. This field is case-insensitive. The available Git providers are `gitHub`,
+ * `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`,
+ * `gitLabEnterpriseEdition` and `awsCodeCommit`.
*/
@JsonProperty("provider")
private String provider;
@@ -35,7 +35,7 @@ public class CreateRepo {
@JsonProperty("url")
private String url;
- public CreateRepo setPath(String path) {
+ public CreateRepoRequest setPath(String path) {
this.path = path;
return this;
}
@@ -44,7 +44,7 @@ public String getPath() {
return path;
}
- public CreateRepo setProvider(String provider) {
+ public CreateRepoRequest setProvider(String provider) {
this.provider = provider;
return this;
}
@@ -53,7 +53,7 @@ public String getProvider() {
return provider;
}
- public CreateRepo setSparseCheckout(SparseCheckout sparseCheckout) {
+ public CreateRepoRequest setSparseCheckout(SparseCheckout sparseCheckout) {
this.sparseCheckout = sparseCheckout;
return this;
}
@@ -62,7 +62,7 @@ public SparseCheckout getSparseCheckout() {
return sparseCheckout;
}
- public CreateRepo setUrl(String url) {
+ public CreateRepoRequest setUrl(String url) {
this.url = url;
return this;
}
@@ -75,7 +75,7 @@ public String getUrl() {
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
- CreateRepo that = (CreateRepo) o;
+ CreateRepoRequest that = (CreateRepoRequest) o;
return Objects.equals(path, that.path)
&& Objects.equals(provider, that.provider)
&& Objects.equals(sparseCheckout, that.sparseCheckout)
@@ -89,7 +89,7 @@ public int hashCode() {
@Override
public String toString() {
- return new ToStringer(CreateRepo.class)
+ return new ToStringer(CreateRepoRequest.class)
.add("path", path)
.add("provider", provider)
.add("sparseCheckout", sparseCheckout)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateRepoResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateRepoResponse.java
new file mode 100755
index 000000000..cb86465a7
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateRepoResponse.java
@@ -0,0 +1,134 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.workspace;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class CreateRepoResponse {
+ /** Branch that the Git folder (repo) is checked out to. */
+ @JsonProperty("branch")
+ private String branch;
+
+ /** SHA-1 hash representing the commit ID of the current HEAD of the Git folder (repo). */
+ @JsonProperty("head_commit_id")
+ private String headCommitId;
+
+ /** ID of the Git folder (repo) object in the workspace. */
+ @JsonProperty("id")
+ private Long id;
+
+ /** Path of the Git folder (repo) in the workspace. */
+ @JsonProperty("path")
+ private String path;
+
+ /** Git provider of the linked Git repository. */
+ @JsonProperty("provider")
+ private String provider;
+
+ /** Sparse checkout settings for the Git folder (repo). */
+ @JsonProperty("sparse_checkout")
+ private SparseCheckout sparseCheckout;
+
+ /** URL of the linked Git repository. */
+ @JsonProperty("url")
+ private String url;
+
+ public CreateRepoResponse setBranch(String branch) {
+ this.branch = branch;
+ return this;
+ }
+
+ public String getBranch() {
+ return branch;
+ }
+
+ public CreateRepoResponse setHeadCommitId(String headCommitId) {
+ this.headCommitId = headCommitId;
+ return this;
+ }
+
+ public String getHeadCommitId() {
+ return headCommitId;
+ }
+
+ public CreateRepoResponse setId(Long id) {
+ this.id = id;
+ return this;
+ }
+
+ public Long getId() {
+ return id;
+ }
+
+ public CreateRepoResponse setPath(String path) {
+ this.path = path;
+ return this;
+ }
+
+ public String getPath() {
+ return path;
+ }
+
+ public CreateRepoResponse setProvider(String provider) {
+ this.provider = provider;
+ return this;
+ }
+
+ public String getProvider() {
+ return provider;
+ }
+
+ public CreateRepoResponse setSparseCheckout(SparseCheckout sparseCheckout) {
+ this.sparseCheckout = sparseCheckout;
+ return this;
+ }
+
+ public SparseCheckout getSparseCheckout() {
+ return sparseCheckout;
+ }
+
+ public CreateRepoResponse setUrl(String url) {
+ this.url = url;
+ return this;
+ }
+
+ public String getUrl() {
+ return url;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CreateRepoResponse that = (CreateRepoResponse) o;
+ return Objects.equals(branch, that.branch)
+ && Objects.equals(headCommitId, that.headCommitId)
+ && Objects.equals(id, that.id)
+ && Objects.equals(path, that.path)
+ && Objects.equals(provider, that.provider)
+ && Objects.equals(sparseCheckout, that.sparseCheckout)
+ && Objects.equals(url, that.url);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(branch, headCommitId, id, path, provider, sparseCheckout, url);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CreateRepoResponse.class)
+ .add("branch", branch)
+ .add("headCommitId", headCommitId)
+ .add("id", id)
+ .add("path", path)
+ .add("provider", provider)
+ .add("sparseCheckout", sparseCheckout)
+ .add("url", url)
+ .toString();
+ }
+}
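With `CreateRepo` renamed to `CreateRepoRequest` and the new `CreateRepoResponse` above, repo creation would look like the sketch below, assuming the `ReposAPI` signatures were updated to the renamed types; the URL and workspace path are placeholders:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.workspace.CreateRepoRequest;
import com.databricks.sdk.service.workspace.CreateRepoResponse;

public class CreateRepoExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    CreateRepoResponse repo =
        w.repos().create(
            new CreateRepoRequest()
                .setUrl("https://github.com/databricks/databricks-sdk-java") // placeholder URL
                .setProvider("gitHub")
                .setPath("/Repos/someone@example.com/databricks-sdk-java")); // placeholder path
    System.out.println(repo.getId());
  }
}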
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CredentialInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CredentialInfo.java
index 5df03a4b4..ebb736500 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CredentialInfo.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CredentialInfo.java
@@ -13,21 +13,13 @@ public class CredentialInfo {
@JsonProperty("credential_id")
private Long credentialId;
- /**
- * Git provider. This field is case-insensitive. The available Git providers are gitHub,
- * gitHubOAuth, bitbucketCloud, gitLab, azureDevOpsServices, gitHubEnterprise, bitbucketServer,
- * gitLabEnterpriseEdition and awsCodeCommit.
- */
+ /** The Git provider associated with the credential. */
@JsonProperty("git_provider")
private String gitProvider;
/**
- * The username or email provided with your Git provider account, depending on which provider you
- * are using. For GitHub, GitHub Enterprise Server, or Azure DevOps Services, either email or
- * username may be used. For GitLab, GitLab Enterprise Edition, email must be used. For AWS
- * CodeCommit, BitBucket or BitBucket Server, username must be used. For all other providers
- * please see your provider's Personal Access Token authentication documentation to see what is
- * supported.
+ * The username or email provided with your Git provider account and associated with the
+ * credential.
*/
@JsonProperty("git_username")
private String gitUsername;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteGitCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteCredentialsRequest.java
similarity index 79%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteGitCredentialRequest.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteCredentialsRequest.java
index 728bef583..103c730f8 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteGitCredentialRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteCredentialsRequest.java
@@ -9,11 +9,11 @@
/** Delete a credential */
@Generated
-public class DeleteGitCredentialRequest {
+public class DeleteCredentialsRequest {
/** The ID for the corresponding credential to access. */
@JsonIgnore private Long credentialId;
- public DeleteGitCredentialRequest setCredentialId(Long credentialId) {
+ public DeleteCredentialsRequest setCredentialId(Long credentialId) {
this.credentialId = credentialId;
return this;
}
@@ -26,7 +26,7 @@ public Long getCredentialId() {
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
- DeleteGitCredentialRequest that = (DeleteGitCredentialRequest) o;
+ DeleteCredentialsRequest that = (DeleteCredentialsRequest) o;
return Objects.equals(credentialId, that.credentialId);
}
@@ -37,7 +37,7 @@ public int hashCode() {
@Override
public String toString() {
- return new ToStringer(DeleteGitCredentialRequest.class)
+ return new ToStringer(DeleteCredentialsRequest.class)
.add("credentialId", credentialId)
.toString();
}
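
A minimal sketch of the rename in use, assuming the standard WorkspaceClient accessor w.gitCredentials(); the credential ID is a placeholder:

    import com.databricks.sdk.WorkspaceClient;
    import com.databricks.sdk.service.workspace.DeleteCredentialsRequest;

    public class DeleteGitCredentialExample {
      public static void main(String[] args) {
        WorkspaceClient w = new WorkspaceClient(); // assumes default authentication
        long credentialId = 123456789L; // hypothetical ID
        // Callers now build DeleteCredentialsRequest instead of DeleteGitCredentialRequest.
        w.gitCredentials().delete(new DeleteCredentialsRequest().setCredentialId(credentialId));
      }
    }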
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteCredentialsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteCredentialsResponse.java
new file mode 100755
index 000000000..3b1fb2ec7
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteCredentialsResponse.java
@@ -0,0 +1,28 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.workspace;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import java.util.Objects;
+
+@Generated
+public class DeleteCredentialsResponse {
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash();
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteCredentialsResponse.class).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteRepoRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteRepoRequest.java
index 0d53417be..72e27bbe5 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteRepoRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteRepoRequest.java
@@ -10,7 +10,7 @@
/** Delete a repo */
@Generated
public class DeleteRepoRequest {
- /** The ID for the corresponding repo to access. */
+ /** ID of the Git folder (repo) object in the workspace. */
@JsonIgnore private Long repoId;
public DeleteRepoRequest setRepoId(Long repoId) {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteRepoResponse.java
similarity index 84%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateResponse.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteRepoResponse.java
index f4dcbc7ea..2fafce6f5 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/DeleteRepoResponse.java
@@ -7,7 +7,7 @@
import java.util.Objects;
@Generated
-public class UpdateResponse {
+public class DeleteRepoResponse {
@Override
public boolean equals(Object o) {
@@ -23,6 +23,6 @@ public int hashCode() {
@Override
public String toString() {
- return new ToStringer(UpdateResponse.class).toString();
+ return new ToStringer(DeleteRepoResponse.class).toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetGitCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetCredentialsRequest.java
similarity index 75%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetGitCredentialRequest.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetCredentialsRequest.java
index 1539a99d9..2dea34f1e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetGitCredentialRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetCredentialsRequest.java
@@ -9,11 +9,11 @@
/** Get a credential entry */
@Generated
-public class GetGitCredentialRequest {
+public class GetCredentialsRequest {
/** The ID for the corresponding credential to access. */
@JsonIgnore private Long credentialId;
- public GetGitCredentialRequest setCredentialId(Long credentialId) {
+ public GetCredentialsRequest setCredentialId(Long credentialId) {
this.credentialId = credentialId;
return this;
}
@@ -26,7 +26,7 @@ public Long getCredentialId() {
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
- GetGitCredentialRequest that = (GetGitCredentialRequest) o;
+ GetCredentialsRequest that = (GetCredentialsRequest) o;
return Objects.equals(credentialId, that.credentialId);
}
@@ -37,8 +37,6 @@ public int hashCode() {
@Override
public String toString() {
- return new ToStringer(GetGitCredentialRequest.class)
- .add("credentialId", credentialId)
- .toString();
+ return new ToStringer(GetCredentialsRequest.class).add("credentialId", credentialId).toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetCredentialsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetCredentialsResponse.java
index 674ebd734..ef4da2906 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetCredentialsResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GetCredentialsResponse.java
@@ -5,22 +5,50 @@
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
-import java.util.Collection;
import java.util.Objects;
@Generated
public class GetCredentialsResponse {
- /** */
- @JsonProperty("credentials")
- private Collection<CredentialInfo> credentials;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GitCredentialsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GitCredentialsAPI.java
 * Deletes the specified Git credential.
*/
- public void delete(DeleteGitCredentialRequest request) {
+ public void delete(DeleteCredentialsRequest request) {
impl.delete(request);
}
- public CredentialInfo get(long credentialId) {
- return get(new GetGitCredentialRequest().setCredentialId(credentialId));
+ public GetCredentialsResponse get(long credentialId) {
+ return get(new GetCredentialsRequest().setCredentialId(credentialId));
}
/**
@@ -67,7 +67,7 @@ public CredentialInfo get(long credentialId) {
*
* Gets the Git credential with the specified credential ID.
*/
- public CredentialInfo get(GetGitCredentialRequest request) {
+ public GetCredentialsResponse get(GetCredentialsRequest request) {
return impl.get(request);
}
@@ -78,11 +78,12 @@ public CredentialInfo get(GetGitCredentialRequest request) {
*/
public Iterable<CredentialInfo> list() {
 * Updates the specified Git credential.
*/
- public void update(UpdateCredentials request) {
+ public void update(UpdateCredentialsRequest request) {
impl.update(request);
}
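
A sketch of the changed get signature above, which now returns GetCredentialsResponse instead of CredentialInfo; since the response's added fields are not visible in this hunk, the example relies only on its toString():

    import com.databricks.sdk.WorkspaceClient;
    import com.databricks.sdk.service.workspace.GetCredentialsResponse;

    public class GetGitCredentialExample {
      public static void main(String[] args) {
        WorkspaceClient w = new WorkspaceClient(); // assumes default authentication
        // get(long) is the convenience overload shown in the diff; the ID is hypothetical.
        GetCredentialsResponse cred = w.gitCredentials().get(123456789L);
        System.out.println(cred);
      }
    }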
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GitCredentialsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GitCredentialsImpl.java
index f17d5055a..8837469e3 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GitCredentialsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GitCredentialsImpl.java
@@ -16,7 +16,7 @@ public GitCredentialsImpl(ApiClient apiClient) {
}
@Override
- public CreateCredentialsResponse create(CreateCredentials request) {
+ public CreateCredentialsResponse create(CreateCredentialsRequest request) {
String path = "/api/2.0/git-credentials";
Map<String, String> headers = new HashMap<>();
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GitCredentialsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GitCredentialsService.java
 * Deletes the specified Git credential.
*/
- void delete(DeleteGitCredentialRequest deleteGitCredentialRequest);
+ void delete(DeleteCredentialsRequest deleteCredentialsRequest);
/**
* Get a credential entry.
*
* Gets the Git credential with the specified credential ID.
*/
- CredentialInfo get(GetGitCredentialRequest getGitCredentialRequest);
+ GetCredentialsResponse get(GetCredentialsRequest getCredentialsRequest);
/**
* Get Git credentials.
*
* Lists the calling user's Git credentials. One credential per user is supported.
*/
- GetCredentialsResponse list();
+ ListCredentialsResponse list();
/**
* Update a credential.
*
* Updates the specified Git credential.
*/
- void update(UpdateCredentials updateCredentials);
+ void update(UpdateCredentialsRequest updateCredentialsRequest);
}
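
A sketch of listing credentials through the high-level API, assuming list() still yields CredentialInfo elements even though the service-level return type is now ListCredentialsResponse:

    import com.databricks.sdk.WorkspaceClient;
    import com.databricks.sdk.service.workspace.CredentialInfo;

    public class ListGitCredentialsExample {
      public static void main(String[] args) {
        WorkspaceClient w = new WorkspaceClient(); // assumes default authentication
        // One credential per user is supported, so this prints at most one entry.
        for (CredentialInfo cred : w.gitCredentials().list()) {
          System.out.println(cred.getGitProvider() + " / " + cred.getGitUsername());
        }
      }
    }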
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListCredentialsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListCredentialsResponse.java
new file mode 100755
index 000000000..6f3c22c24
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ListCredentialsResponse.java
@@ -0,0 +1,43 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.workspace;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class ListCredentialsResponse {
+ /** List of credentials. */
+ @JsonProperty("credentials")
+ private Collection<CredentialInfo> credentials;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposAPI.java
 * Returns the repo with the given repo ID.
*/
- public RepoInfo get(GetRepoRequest request) {
+ public GetRepoResponse get(GetRepoRequest request) {
return impl.get(request);
}
@@ -106,8 +106,8 @@ public RepoPermissions getPermissions(GetRepoPermissionsRequest request) {
/**
* Get repos.
*
- * Returns repos that the calling user has Manage permissions on. Results are paginated with
- * each page containing twenty repos.
+ * Returns repos that the calling user has Manage permissions on. Use `next_page_token` to
+ * iterate through additional pages.
*/
public Iterable<RepoInfo> list(ListReposRequest request) {
 * Updates the repo to a different branch or tag, or updates the repo to the latest commit on
* the same branch.
*/
- public void update(UpdateRepo request) {
+ public void update(UpdateRepoRequest request) {
impl.update(request);
}
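
A sketch of the updated ReposAPI surface: get now returns GetRepoResponse, and list keeps returning an iterable while the SDK follows next_page_token internally. GetRepoRequest.setRepoId and the RepoInfo element type are assumptions based on the surrounding generated code, and the repo ID is a placeholder:

    import com.databricks.sdk.WorkspaceClient;
    import com.databricks.sdk.service.workspace.GetRepoRequest;
    import com.databricks.sdk.service.workspace.GetRepoResponse;
    import com.databricks.sdk.service.workspace.ListReposRequest;
    import com.databricks.sdk.service.workspace.RepoInfo;

    public class ReposReadExample {
      public static void main(String[] args) {
        WorkspaceClient w = new WorkspaceClient(); // assumes default authentication
        // get() now returns GetRepoResponse instead of RepoInfo; setRepoId is assumed.
        GetRepoResponse repo = w.repos().get(new GetRepoRequest().setRepoId(42L));
        System.out.println(repo);

        // Pagination via next_page_token is handled inside the returned Iterable.
        for (RepoInfo r : w.repos().list(new ListReposRequest())) {
          System.out.println(r);
        }
      }
    }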
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposImpl.java
index d3a1f9090..03def8f93 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposImpl.java
@@ -16,27 +16,28 @@ public ReposImpl(ApiClient apiClient) {
}
@Override
- public RepoInfo create(CreateRepo request) {
+ public CreateRepoResponse create(CreateRepoRequest request) {
String path = "/api/2.0/repos";
Map<String, String> headers = new HashMap<>();
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/ReposService.java
 * Returns the repo with the given repo ID.
*/
- RepoInfo get(GetRepoRequest getRepoRequest);
+ GetRepoResponse get(GetRepoRequest getRepoRequest);
/**
* Get repo permission levels.
@@ -62,8 +62,8 @@ GetRepoPermissionLevelsResponse getPermissionLevels(
/**
* Get repos.
*
- * Returns repos that the calling user has Manage permissions on. Results are paginated with
- * each page containing twenty repos.
+ * Returns repos that the calling user has Manage permissions on. Use `next_page_token` to
+ * iterate through additional pages.
*/
ListReposResponse list(ListReposRequest listReposRequest);
@@ -80,7 +80,7 @@ GetRepoPermissionLevelsResponse getPermissionLevels(
* Updates the repo to a different branch or tag, or updates the repo to the latest commit on
* the same branch.
*/
- void update(UpdateRepo updateRepo);
+ void update(UpdateRepoRequest updateRepoRequest);
/**
* Update repo permissions.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SparseCheckout.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SparseCheckout.java
index 4736bb3b9..eb1201e43 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SparseCheckout.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SparseCheckout.java
@@ -8,9 +8,15 @@
import java.util.Collection;
import java.util.Objects;
+/** Sparse checkout configuration; it contains options like cone patterns. */
@Generated
public class SparseCheckout {
- /** List of patterns to include for sparse checkout. */
+ /**
+ * List of sparse checkout cone patterns, see [cone mode handling] for details.
+ *
+ * [cone mode handling]:
+ * https://git-scm.com/docs/git-sparse-checkout#_internalscone_mode_handling
+ */
@JsonProperty("patterns")
private Collection<String> patterns;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SparseCheckoutUpdate.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SparseCheckoutUpdate.java
+ * [cone mode handling]:
+ * https://git-scm.com/docs/git-sparse-checkout#_internalscone_mode_handling
+ */
@JsonProperty("patterns")
private Collection<String> patterns;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateCredentials.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateCredentialsRequest.java
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateCredentials.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateCredentialsRequest.java
 * [Learn more]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html
*/
@JsonProperty("personal_access_token")
private String personalAccessToken;
- public UpdateCredentials setCredentialId(Long credentialId) {
+ public UpdateCredentialsRequest setCredentialId(Long credentialId) {
this.credentialId = credentialId;
return this;
}
@@ -51,7 +50,7 @@ public Long getCredentialId() {
return credentialId;
}
- public UpdateCredentials setGitProvider(String gitProvider) {
+ public UpdateCredentialsRequest setGitProvider(String gitProvider) {
this.gitProvider = gitProvider;
return this;
}
@@ -60,7 +59,7 @@ public String getGitProvider() {
return gitProvider;
}
- public UpdateCredentials setGitUsername(String gitUsername) {
+ public UpdateCredentialsRequest setGitUsername(String gitUsername) {
this.gitUsername = gitUsername;
return this;
}
@@ -69,7 +68,7 @@ public String getGitUsername() {
return gitUsername;
}
- public UpdateCredentials setPersonalAccessToken(String personalAccessToken) {
+ public UpdateCredentialsRequest setPersonalAccessToken(String personalAccessToken) {
this.personalAccessToken = personalAccessToken;
return this;
}
@@ -82,7 +81,7 @@ public String getPersonalAccessToken() {
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
- UpdateCredentials that = (UpdateCredentials) o;
+ UpdateCredentialsRequest that = (UpdateCredentialsRequest) o;
return Objects.equals(credentialId, that.credentialId)
&& Objects.equals(gitProvider, that.gitProvider)
&& Objects.equals(gitUsername, that.gitUsername)
@@ -96,7 +95,7 @@ public int hashCode() {
@Override
public String toString() {
- return new ToStringer(UpdateCredentials.class)
+ return new ToStringer(UpdateCredentialsRequest.class)
.add("credentialId", credentialId)
.add("gitProvider", gitProvider)
.add("gitUsername", gitUsername)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateCredentialsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateCredentialsResponse.java
new file mode 100755
index 000000000..20e001bd3
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateCredentialsResponse.java
@@ -0,0 +1,28 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.workspace;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import java.util.Objects;
+
+@Generated
+public class UpdateCredentialsResponse {
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash();
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateCredentialsResponse.class).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateRepo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateRepoRequest.java
similarity index 83%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateRepo.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateRepoRequest.java
index 205b1f7ea..107125ef8 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateRepo.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateRepoRequest.java
@@ -9,12 +9,12 @@
import java.util.Objects;
@Generated
-public class UpdateRepo {
+public class UpdateRepoRequest {
/** Branch that the local version of the repo is checked out to. */
@JsonProperty("branch")
private String branch;
- /** The ID for the corresponding repo to access. */
+ /** ID of the Git folder (repo) object in the workspace. */
@JsonIgnore private Long repoId;
/**
@@ -32,7 +32,7 @@ public class UpdateRepo {
@JsonProperty("tag")
private String tag;
- public UpdateRepo setBranch(String branch) {
+ public UpdateRepoRequest setBranch(String branch) {
this.branch = branch;
return this;
}
@@ -41,7 +41,7 @@ public String getBranch() {
return branch;
}
- public UpdateRepo setRepoId(Long repoId) {
+ public UpdateRepoRequest setRepoId(Long repoId) {
this.repoId = repoId;
return this;
}
@@ -50,7 +50,7 @@ public Long getRepoId() {
return repoId;
}
- public UpdateRepo setSparseCheckout(SparseCheckoutUpdate sparseCheckout) {
+ public UpdateRepoRequest setSparseCheckout(SparseCheckoutUpdate sparseCheckout) {
this.sparseCheckout = sparseCheckout;
return this;
}
@@ -59,7 +59,7 @@ public SparseCheckoutUpdate getSparseCheckout() {
return sparseCheckout;
}
- public UpdateRepo setTag(String tag) {
+ public UpdateRepoRequest setTag(String tag) {
this.tag = tag;
return this;
}
@@ -72,7 +72,7 @@ public String getTag() {
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
- UpdateRepo that = (UpdateRepo) o;
+ UpdateRepoRequest that = (UpdateRepoRequest) o;
return Objects.equals(branch, that.branch)
&& Objects.equals(repoId, that.repoId)
&& Objects.equals(sparseCheckout, that.sparseCheckout)
@@ -86,7 +86,7 @@ public int hashCode() {
@Override
public String toString() {
- return new ToStringer(UpdateRepo.class)
+ return new ToStringer(UpdateRepoRequest.class)
.add("branch", branch)
.add("repoId", repoId)
.add("sparseCheckout", sparseCheckout)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateRepoResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateRepoResponse.java
new file mode 100755
index 000000000..c7d596164
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateRepoResponse.java
@@ -0,0 +1,28 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.workspace;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import java.util.Objects;
+
+@Generated
+public class UpdateRepoResponse {
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash();
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateRepoResponse.class).toString();
+ }
+}
diff --git a/examples/docs/pom.xml b/examples/docs/pom.xml
index 49e56d1ee..504a5cb2f 100644
--- a/examples/docs/pom.xml
+++ b/examples/docs/pom.xml
@@ -24,7 +24,7 @@