diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index c9ce6cc2e..f705ffea6 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -0e0d4cbe87193e36c73b8b2be3b0dd0f1b013e00 \ No newline at end of file +e05401ed5dd4974c5333d737ec308a7d451f749f \ No newline at end of file diff --git a/.gitattributes b/.gitattributes index d17eb3a05..6bbee7a7a 100755 --- a/.gitattributes +++ b/.gitattributes @@ -87,6 +87,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunct databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunctionSqlDataAccess.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMetastore.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMetastoreAssignment.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMonitor.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateRegisteredModelRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateSchema.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateStorageCredential.java linguist-generated=true @@ -104,6 +105,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteCatal databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteConnectionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteExternalLocationRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteFunctionRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteLakehouseMonitorRequest.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteMetastoreRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteModelVersionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteRegisteredModelRequest.java linguist-generated=true @@ -126,6 +128,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnablePredi databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnableRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EnableSchemaName.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EncryptionDetails.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExistsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsImpl.java linguist-generated=true @@ -156,6 +159,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetEffectiv databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetExternalLocationRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetFunctionRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetGrantRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetLakehouseMonitorRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetMetastoreRequest.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetMetastoreSummaryResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetMetastoreSummaryResponseDeltaSharingScope.java linguist-generated=true @@ -169,6 +173,9 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsAPI.j databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GrantsService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/IsolationMode.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/LakehouseMonitorsAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/LakehouseMonitorsImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/LakehouseMonitorsService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountMetastoreAssignmentsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountMetastoreAssignmentsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountStorageCredentialsRequest.java linguist-generated=true @@ -207,6 +214,18 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersio databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionsAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionsService.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorCronSchedule.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorCronSchedulePauseStatus.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorCustomMetric.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorCustomMetricType.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorDataClassificationConfig.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorDestinations.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInferenceLogProfileType.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInferenceLogProfileTypeProblemType.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInfo.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInfoStatus.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorNotificationsConfig.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorTimeSeriesProfileType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/NamedTableConstraint.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PermissionsChange.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PermissionsList.java linguist-generated=true @@ -244,6 +263,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableConstr 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableConstraintsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableConstraintsService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableDependency.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableExistsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableRowFilter.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableSummary.java linguist-generated=true @@ -260,6 +280,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMetas databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMetastoreAssignment.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMetastoreDeltaSharingScope.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateModelVersionRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMonitor.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdatePermissions.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateRegisteredModelRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateSchema.java linguist-generated=true @@ -1216,6 +1237,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetPrivate databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetStatusRequest.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetTokenManagementRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetTokenPermissionLevelsResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetTokenResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsImpl.java linguist-generated=true diff --git a/CHANGELOG.md b/CHANGELOG.md index 4ada602fd..b254c6ac9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,54 @@ # Version changelog +## 0.17.0 + +Bugfixes: + +* Update SDK to OpenAPI spec with bug fix ([#207](https://github.com/databricks/databricks-sdk-java/pull/207)). +* Fix Azure Databricks OAuth M2M ([#209](https://github.com/databricks/databricks-sdk-java/pull/209)). + +API Changes: + + * Added `exists()` method for `workspaceClient.tables()` service. + * Added `workspaceClient.lakehouseMonitors()` service. + * Removed `com.databricks.sdk.service.catalog.TableConstraintList` class. + * Added `initScripts` field for `com.databricks.sdk.service.pipelines.PipelineCluster`. 
+ * Added the following classes: + `com.databricks.sdk.service.catalog.CreateMonitor`, + `com.databricks.sdk.service.catalog.DeleteLakehouseMonitorRequest`, + `com.databricks.sdk.service.catalog.ExistsRequest`, + `com.databricks.sdk.service.catalog.GetLakehouseMonitorRequest`, + `com.databricks.sdk.service.catalog.MonitorCronSchedule`, + `com.databricks.sdk.service.catalog.MonitorCronSchedulePauseStatus`, + `com.databricks.sdk.service.catalog.MonitorCustomMetric`, + `com.databricks.sdk.service.catalog.MonitorCustomMetricType`, + `com.databricks.sdk.service.catalog.MonitorDataClassificationConfig`, + `com.databricks.sdk.service.catalog.MonitorDestinations`, + `com.databricks.sdk.service.catalog.MonitorInferenceLogProfileType`, + `com.databricks.sdk.service.catalog.MonitorInferenceLogProfileTypeProblemType`, + `com.databricks.sdk.service.catalog.MonitorInfo`, + `com.databricks.sdk.service.catalog.MonitorInfoStatus`, + `com.databricks.sdk.service.catalog.MonitorNotificationsConfig`, + `com.databricks.sdk.service.catalog.MonitorTimeSeriesProfileType`, + `com.databricks.sdk.service.catalog.TableExistsResponse` and + `com.databricks.sdk.service.catalog.UpdateMonitor`. + * Added `validateOnly` field for `com.databricks.sdk.service.pipelines.StartUpdate`. + * Added `validateOnly` field for `com.databricks.sdk.service.pipelines.UpdateInfo`. + * Changed `createOboToken()` method for `workspaceClient.tokenManagement()` service with new required argument order. + * Changed `get()` method for `workspaceClient.tokenManagement()` service to return `com.databricks.sdk.service.settings.GetTokenResponse` class. + * Changed `lifetimeSeconds` field for `com.databricks.sdk.service.settings.CreateOboTokenRequest` to no longer be required. + * Added `com.databricks.sdk.service.settings.GetTokenResponse` class. + * Changed `create()` method for `workspaceClient.dashboards()` service. New request type is `com.databricks.sdk.service.sql.DashboardPostContent` class. + * Added `update()` method for `workspaceClient.dashboards()` service. 
+ * Removed `com.databricks.sdk.service.sql.CreateDashboardRequest` class. + * Added `httpHeaders` field for `com.databricks.sdk.service.sql.ExternalLink`. + * Added `runAsRole` field for `com.databricks.sdk.service.sql.QueryEditContent`. + * Added `com.databricks.sdk.service.sql.DashboardEditContent` class. + * Added `com.databricks.sdk.service.sql.DashboardPostContent` class. + +OpenAPI SHA: e05401ed5dd4974c5333d737ec308a7d451f749f, Date: 2024-01-23 + + ## 0.16.0 * Update to OpenAPI spec ([#204](https://github.com/databricks/databricks-sdk-java/pull/204)). diff --git a/databricks-sdk-java/pom.xml b/databricks-sdk-java/pom.xml index f1537ba16..b92aa2f8d 100644 --- a/databricks-sdk-java/pom.xml +++ b/databricks-sdk-java/pom.xml @@ -5,7 +5,7 @@ com.databricks databricks-sdk-parent - 0.16.0 + 0.17.0 databricks-sdk-java diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java index 72ab50c52..b669bfa00 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java @@ -20,6 +20,8 @@ import com.databricks.sdk.service.catalog.FunctionsService; import com.databricks.sdk.service.catalog.GrantsAPI; import com.databricks.sdk.service.catalog.GrantsService; +import com.databricks.sdk.service.catalog.LakehouseMonitorsAPI; +import com.databricks.sdk.service.catalog.LakehouseMonitorsService; import com.databricks.sdk.service.catalog.MetastoresAPI; import com.databricks.sdk.service.catalog.MetastoresService; import com.databricks.sdk.service.catalog.ModelVersionsAPI; @@ -174,6 +176,7 @@ public class WorkspaceClient { private InstanceProfilesAPI instanceProfilesAPI; private IpAccessListsAPI ipAccessListsAPI; private JobsAPI jobsAPI; + private LakehouseMonitorsAPI lakehouseMonitorsAPI; private LakeviewAPI lakeviewAPI; private LibrariesAPI librariesAPI; private 
MetastoresAPI metastoresAPI; @@ -249,6 +252,7 @@ public WorkspaceClient(DatabricksConfig config) { instanceProfilesAPI = new InstanceProfilesAPI(apiClient); ipAccessListsAPI = new IpAccessListsAPI(apiClient); jobsAPI = new JobsAPI(apiClient); + lakehouseMonitorsAPI = new LakehouseMonitorsAPI(apiClient); lakeviewAPI = new LakeviewAPI(apiClient); librariesAPI = new LibrariesAPI(apiClient); metastoresAPI = new MetastoresAPI(apiClient); @@ -366,10 +370,10 @@ public CleanRoomsAPI cleanRooms() { * creation. Cluster policies have ACLs that limit their use to specific users and groups. * *

With cluster policies, you can: - Auto-install cluster libraries on the next restart by - * listing them in the policy's "libraries" field. - Limit users to creating clusters with the - * prescribed settings. - Simplify the user interface, enabling more users to create clusters, by - * fixing and hiding some fields. - Manage costs by setting limits on attributes that impact the - * hourly rate. + * listing them in the policy's "libraries" field (Public Preview). - Limit users to creating + * clusters with the prescribed settings. - Simplify the user interface, enabling more users to + * create clusters, by fixing and hiding some fields. - Manage costs by setting limits on + * attributes that impact the hourly rate. * *

Cluster policy permissions limit which policies a user can select in the Policy drop-down * when the user creates a cluster: - A user who has unrestricted cluster create permission can @@ -693,6 +697,20 @@ public JobsAPI jobs() { return jobsAPI; } + /** + * A monitor computes and monitors data or model quality metrics for a table over time. It + * generates metrics tables and a dashboard that you can use to monitor table health and set + * alerts. + * + *

Most write operations require the user to be the owner of the table (or its parent schema or + * parent catalog). Viewing the dashboard, computed metrics, or monitor configuration only + * requires the user to have **SELECT** privileges on the table (along with **USE_SCHEMA** and + * **USE_CATALOG**). + */ + public LakehouseMonitorsAPI lakehouseMonitors() { + return lakehouseMonitorsAPI; + } + /** * These APIs provide specific management operations for Lakeview dashboards. Generic resource * management can be done with Workspace API (import, export, get-status, list, delete). @@ -1480,6 +1498,12 @@ public WorkspaceClient withJobsImpl(JobsService jobs) { return this; } + /** Replace LakehouseMonitorsAPI implementation with mock */ + public WorkspaceClient withLakehouseMonitorsImpl(LakehouseMonitorsService lakehouseMonitors) { + lakehouseMonitorsAPI = new LakehouseMonitorsAPI(lakehouseMonitors); + return this; + } + /** Replace LakeviewAPI implementation with mock */ public WorkspaceClient withLakeviewImpl(LakeviewService lakeview) { lakeviewAPI = new LakeviewAPI(lakeview); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/UserAgent.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/UserAgent.java index 514b5ac82..875dd24a8 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/UserAgent.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/UserAgent.java @@ -13,7 +13,7 @@ public class UserAgent { // TODO: check if reading from // /META-INF/maven/com.databricks/databrics-sdk-java/pom.properties // or getClass().getPackage().getImplementationVersion() is enough. 
- private static final String version = "0.16.0"; + private static final String version = "0.17.0"; public static void withProduct(String product, String productVersion) { UserAgent.product = product; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMonitor.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMonitor.java new file mode 100755 index 000000000..b690ab23b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMonitor.java @@ -0,0 +1,269 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class CreateMonitor { + /** The directory to store monitoring assets (e.g. dashboard, metric tables). */ + @JsonProperty("assets_dir") + private String assetsDir; + + /** + * Name of the baseline table from which drift metrics are computed from. Columns in the monitored + * table should also be present in the baseline table. + */ + @JsonProperty("baseline_table_name") + private String baselineTableName; + + /** + * Custom metrics to compute on the monitored table. These can be aggregate metrics, derived + * metrics (from already computed aggregate metrics), or drift metrics (comparing metrics across + * time windows). + */ + @JsonProperty("custom_metrics") + private Collection customMetrics; + + /** The data classification config for the monitor. */ + @JsonProperty("data_classification_config") + private MonitorDataClassificationConfig dataClassificationConfig; + + /** Full name of the table. */ + private String fullName; + + /** Configuration for monitoring inference logs. 
*/ + @JsonProperty("inference_log") + private MonitorInferenceLogProfileType inferenceLog; + + /** The notification settings for the monitor. */ + @JsonProperty("notifications") + private Collection notifications; + + /** Schema where output metric tables are created. */ + @JsonProperty("output_schema_name") + private String outputSchemaName; + + /** The schedule for automatically updating and refreshing metric tables. */ + @JsonProperty("schedule") + private MonitorCronSchedule schedule; + + /** Whether to skip creating a default dashboard summarizing data quality metrics. */ + @JsonProperty("skip_builtin_dashboard") + private Boolean skipBuiltinDashboard; + + /** + * List of column expressions to slice data with for targeted analysis. The data is grouped by + * each expression independently, resulting in a separate slice for each predicate and its + * complements. For high-cardinality columns, only the top 100 unique values by frequency will + * generate slices. + */ + @JsonProperty("slicing_exprs") + private Collection slicingExprs; + + /** Configuration for monitoring snapshot tables. */ + @JsonProperty("snapshot") + private Object snapshot; + + /** Configuration for monitoring time series tables. */ + @JsonProperty("time_series") + private MonitorTimeSeriesProfileType timeSeries; + + /** + * Optional argument to specify the warehouse for dashboard creation. If not specified, the first + * running warehouse will be used. 
+ */ + @JsonProperty("warehouse_id") + private String warehouseId; + + public CreateMonitor setAssetsDir(String assetsDir) { + this.assetsDir = assetsDir; + return this; + } + + public String getAssetsDir() { + return assetsDir; + } + + public CreateMonitor setBaselineTableName(String baselineTableName) { + this.baselineTableName = baselineTableName; + return this; + } + + public String getBaselineTableName() { + return baselineTableName; + } + + public CreateMonitor setCustomMetrics(Collection customMetrics) { + this.customMetrics = customMetrics; + return this; + } + + public Collection getCustomMetrics() { + return customMetrics; + } + + public CreateMonitor setDataClassificationConfig( + MonitorDataClassificationConfig dataClassificationConfig) { + this.dataClassificationConfig = dataClassificationConfig; + return this; + } + + public MonitorDataClassificationConfig getDataClassificationConfig() { + return dataClassificationConfig; + } + + public CreateMonitor setFullName(String fullName) { + this.fullName = fullName; + return this; + } + + public String getFullName() { + return fullName; + } + + public CreateMonitor setInferenceLog(MonitorInferenceLogProfileType inferenceLog) { + this.inferenceLog = inferenceLog; + return this; + } + + public MonitorInferenceLogProfileType getInferenceLog() { + return inferenceLog; + } + + public CreateMonitor setNotifications(Collection notifications) { + this.notifications = notifications; + return this; + } + + public Collection getNotifications() { + return notifications; + } + + public CreateMonitor setOutputSchemaName(String outputSchemaName) { + this.outputSchemaName = outputSchemaName; + return this; + } + + public String getOutputSchemaName() { + return outputSchemaName; + } + + public CreateMonitor setSchedule(MonitorCronSchedule schedule) { + this.schedule = schedule; + return this; + } + + public MonitorCronSchedule getSchedule() { + return schedule; + } + + public CreateMonitor setSkipBuiltinDashboard(Boolean 
skipBuiltinDashboard) { + this.skipBuiltinDashboard = skipBuiltinDashboard; + return this; + } + + public Boolean getSkipBuiltinDashboard() { + return skipBuiltinDashboard; + } + + public CreateMonitor setSlicingExprs(Collection slicingExprs) { + this.slicingExprs = slicingExprs; + return this; + } + + public Collection getSlicingExprs() { + return slicingExprs; + } + + public CreateMonitor setSnapshot(Object snapshot) { + this.snapshot = snapshot; + return this; + } + + public Object getSnapshot() { + return snapshot; + } + + public CreateMonitor setTimeSeries(MonitorTimeSeriesProfileType timeSeries) { + this.timeSeries = timeSeries; + return this; + } + + public MonitorTimeSeriesProfileType getTimeSeries() { + return timeSeries; + } + + public CreateMonitor setWarehouseId(String warehouseId) { + this.warehouseId = warehouseId; + return this; + } + + public String getWarehouseId() { + return warehouseId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateMonitor that = (CreateMonitor) o; + return Objects.equals(assetsDir, that.assetsDir) + && Objects.equals(baselineTableName, that.baselineTableName) + && Objects.equals(customMetrics, that.customMetrics) + && Objects.equals(dataClassificationConfig, that.dataClassificationConfig) + && Objects.equals(fullName, that.fullName) + && Objects.equals(inferenceLog, that.inferenceLog) + && Objects.equals(notifications, that.notifications) + && Objects.equals(outputSchemaName, that.outputSchemaName) + && Objects.equals(schedule, that.schedule) + && Objects.equals(skipBuiltinDashboard, that.skipBuiltinDashboard) + && Objects.equals(slicingExprs, that.slicingExprs) + && Objects.equals(snapshot, that.snapshot) + && Objects.equals(timeSeries, that.timeSeries) + && Objects.equals(warehouseId, that.warehouseId); + } + + @Override + public int hashCode() { + return Objects.hash( + assetsDir, + baselineTableName, + customMetrics, + 
dataClassificationConfig, + fullName, + inferenceLog, + notifications, + outputSchemaName, + schedule, + skipBuiltinDashboard, + slicingExprs, + snapshot, + timeSeries, + warehouseId); + } + + @Override + public String toString() { + return new ToStringer(CreateMonitor.class) + .add("assetsDir", assetsDir) + .add("baselineTableName", baselineTableName) + .add("customMetrics", customMetrics) + .add("dataClassificationConfig", dataClassificationConfig) + .add("fullName", fullName) + .add("inferenceLog", inferenceLog) + .add("notifications", notifications) + .add("outputSchemaName", outputSchemaName) + .add("schedule", schedule) + .add("skipBuiltinDashboard", skipBuiltinDashboard) + .add("slicingExprs", slicingExprs) + .add("snapshot", snapshot) + .add("timeSeries", timeSeries) + .add("warehouseId", warehouseId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteLakehouseMonitorRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteLakehouseMonitorRequest.java new file mode 100755 index 000000000..ecd871b14 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteLakehouseMonitorRequest.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +/** Delete a table monitor */ +@Generated +public class DeleteLakehouseMonitorRequest { + /** Full name of the table. 
*/ + private String fullName; + + public DeleteLakehouseMonitorRequest setFullName(String fullName) { + this.fullName = fullName; + return this; + } + + public String getFullName() { + return fullName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteLakehouseMonitorRequest that = (DeleteLakehouseMonitorRequest) o; + return Objects.equals(fullName, that.fullName); + } + + @Override + public int hashCode() { + return Objects.hash(fullName); + } + + @Override + public String toString() { + return new ToStringer(DeleteLakehouseMonitorRequest.class).add("fullName", fullName).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExistsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExistsRequest.java new file mode 100755 index 000000000..8aeb430d3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExistsRequest.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +/** Get boolean reflecting if table exists */ +@Generated +public class ExistsRequest { + /** Full name of the table. 
*/ + private String fullName; + + public ExistsRequest setFullName(String fullName) { + this.fullName = fullName; + return this; + } + + public String getFullName() { + return fullName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ExistsRequest that = (ExistsRequest) o; + return Objects.equals(fullName, that.fullName); + } + + @Override + public int hashCode() { + return Objects.hash(fullName); + } + + @Override + public String toString() { + return new ToStringer(ExistsRequest.class).add("fullName", fullName).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetLakehouseMonitorRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetLakehouseMonitorRequest.java new file mode 100755 index 000000000..f03af68cd --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetLakehouseMonitorRequest.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +/** Get a table monitor */ +@Generated +public class GetLakehouseMonitorRequest { + /** Full name of the table. 
*/ + private String fullName; + + public GetLakehouseMonitorRequest setFullName(String fullName) { + this.fullName = fullName; + return this; + } + + public String getFullName() { + return fullName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetLakehouseMonitorRequest that = (GetLakehouseMonitorRequest) o; + return Objects.equals(fullName, that.fullName); + } + + @Override + public int hashCode() { + return Objects.hash(fullName); + } + + @Override + public String toString() { + return new ToStringer(GetLakehouseMonitorRequest.class).add("fullName", fullName).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/LakehouseMonitorsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/LakehouseMonitorsAPI.java new file mode 100755 index 000000000..356fcdd3b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/LakehouseMonitorsAPI.java @@ -0,0 +1,135 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * A monitor computes and monitors data or model quality metrics for a table over time. It generates + * metrics tables and a dashboard that you can use to monitor table health and set alerts. + * + *

Most write operations require the user to be the owner of the table (or its parent schema or + * parent catalog). Viewing the dashboard, computed metrics, or monitor configuration only requires + * the user to have **SELECT** privileges on the table (along with **USE_SCHEMA** and + * **USE_CATALOG**). + */ +@Generated +public class LakehouseMonitorsAPI { + private static final Logger LOG = LoggerFactory.getLogger(LakehouseMonitorsAPI.class); + + private final LakehouseMonitorsService impl; + + /** Regular-use constructor */ + public LakehouseMonitorsAPI(ApiClient apiClient) { + impl = new LakehouseMonitorsImpl(apiClient); + } + + /** Constructor for mocks */ + public LakehouseMonitorsAPI(LakehouseMonitorsService mock) { + impl = mock; + } + + public MonitorInfo create(String fullName, String assetsDir, String outputSchemaName) { + return create( + new CreateMonitor() + .setFullName(fullName) + .setAssetsDir(assetsDir) + .setOutputSchemaName(outputSchemaName)); + } + + /** + * Create a table monitor. + * + *

Creates a new monitor for the specified table. + * + *

The caller must either: 1. be an owner of the table's parent catalog, have **USE_SCHEMA** on + * the table's parent schema, and have **SELECT** access on the table 2. have **USE_CATALOG** on + * the table's parent catalog, be an owner of the table's parent schema, and have **SELECT** + * access on the table. 3. have the following permissions: - **USE_CATALOG** on the table's parent + * catalog - **USE_SCHEMA** on the table's parent schema - be an owner of the table. + * + *

Workspace assets, such as the dashboard, will be created in the workspace where this call + * was made. + */ + public MonitorInfo create(CreateMonitor request) { + return impl.create(request); + } + + public void delete(String fullName) { + delete(new DeleteLakehouseMonitorRequest().setFullName(fullName)); + } + + /** + * Delete a table monitor. + * + *

Deletes a monitor for the specified table. + * + *

The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** + * on the table's parent catalog and be an owner of the table's parent schema 3. have the + * following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the + * table's parent schema - be an owner of the table. + * + *

Additionally, the call must be made from the workspace where the monitor was created. + * + *

Note that the metric tables and dashboard will not be deleted as part of this call; those + * assets must be manually cleaned up (if desired). + */ + public void delete(DeleteLakehouseMonitorRequest request) { + impl.delete(request); + } + + public MonitorInfo get(String fullName) { + return get(new GetLakehouseMonitorRequest().setFullName(fullName)); + } + + /** + * Get a table monitor. + * + *

Gets a monitor for the specified table. + * + *

The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** + * on the table's parent catalog and be an owner of the table's parent schema. 3. have the + * following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the + * table's parent schema - **SELECT** privilege on the table. + * + *

The returned information includes configuration values, as well as information on assets + * created by the monitor. Some information (e.g., dashboard) may be filtered out if the caller is + * in a different workspace than where the monitor was created. + */ + public MonitorInfo get(GetLakehouseMonitorRequest request) { + return impl.get(request); + } + + public MonitorInfo update(String fullName, String assetsDir, String outputSchemaName) { + return update( + new UpdateMonitor() + .setFullName(fullName) + .setAssetsDir(assetsDir) + .setOutputSchemaName(outputSchemaName)); + } + + /** + * Update a table monitor. + * + *

Updates a monitor for the specified table. + * + *

The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** + * on the table's parent catalog and be an owner of the table's parent schema 3. have the + * following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the + * table's parent schema - be an owner of the table. + * + *

Additionally, the call must be made from the workspace where the monitor was created, and + * the caller must be the original creator of the monitor. + * + *

Certain configuration fields, such as output asset identifiers, cannot be updated. + */ + public MonitorInfo update(UpdateMonitor request) { + return impl.update(request); + } + + public LakehouseMonitorsService impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/LakehouseMonitorsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/LakehouseMonitorsImpl.java new file mode 100755 index 000000000..5f885fdf3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/LakehouseMonitorsImpl.java @@ -0,0 +1,50 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import java.util.HashMap; +import java.util.Map; + +/** Package-local implementation of LakehouseMonitors */ +@Generated +class LakehouseMonitorsImpl implements LakehouseMonitorsService { + private final ApiClient apiClient; + + public LakehouseMonitorsImpl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public MonitorInfo create(CreateMonitor request) { + String path = String.format("/api/2.1/unity-catalog/tables/%s/monitor", request.getFullName()); + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + headers.put("Content-Type", "application/json"); + return apiClient.POST(path, request, MonitorInfo.class, headers); + } + + @Override + public void delete(DeleteLakehouseMonitorRequest request) { + String path = String.format("/api/2.1/unity-catalog/tables/%s/monitor", request.getFullName()); + Map headers = new HashMap<>(); + apiClient.DELETE(path, request, Void.class, headers); + } + + @Override + public MonitorInfo get(GetLakehouseMonitorRequest request) { + String path = String.format("/api/2.1/unity-catalog/tables/%s/monitor", request.getFullName()); + Map headers = new 
HashMap<>(); + headers.put("Accept", "application/json"); + return apiClient.GET(path, request, MonitorInfo.class, headers); + } + + @Override + public MonitorInfo update(UpdateMonitor request) { + String path = String.format("/api/2.1/unity-catalog/tables/%s/monitor", request.getFullName()); + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + headers.put("Content-Type", "application/json"); + return apiClient.PUT(path, request, MonitorInfo.class, headers); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/LakehouseMonitorsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/LakehouseMonitorsService.java new file mode 100755 index 000000000..66c6e72d3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/LakehouseMonitorsService.java @@ -0,0 +1,86 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; + +/** + * A monitor computes and monitors data or model quality metrics for a table over time. It generates + * metrics tables and a dashboard that you can use to monitor table health and set alerts. + * + *

Most write operations require the user to be the owner of the table (or its parent schema or + * parent catalog). Viewing the dashboard, computed metrics, or monitor configuration only requires + * the user to have **SELECT** privileges on the table (along with **USE_SCHEMA** and + * **USE_CATALOG**). + * + *

This is the high-level interface, that contains generated methods. + * + *

Evolving: this interface is under development. Method signatures may change. + */ +@Generated +public interface LakehouseMonitorsService { + /** + * Create a table monitor. + * + *

Creates a new monitor for the specified table. + * + *

The caller must either: 1. be an owner of the table's parent catalog, have **USE_SCHEMA** on + * the table's parent schema, and have **SELECT** access on the table 2. have **USE_CATALOG** on + * the table's parent catalog, be an owner of the table's parent schema, and have **SELECT** + * access on the table. 3. have the following permissions: - **USE_CATALOG** on the table's parent + * catalog - **USE_SCHEMA** on the table's parent schema - be an owner of the table. + * + *

Workspace assets, such as the dashboard, will be created in the workspace where this call + * was made. + */ + MonitorInfo create(CreateMonitor createMonitor); + + /** + * Delete a table monitor. + * + *

Deletes a monitor for the specified table. + * + *

The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** + * on the table's parent catalog and be an owner of the table's parent schema 3. have the + * following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the + * table's parent schema - be an owner of the table. + * + *

Additionally, the call must be made from the workspace where the monitor was created. + * + *

Note that the metric tables and dashboard will not be deleted as part of this call; those + * assets must be manually cleaned up (if desired). + */ + void delete(DeleteLakehouseMonitorRequest deleteLakehouseMonitorRequest); + + /** + * Get a table monitor. + * + *

Gets a monitor for the specified table. + * + *

The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** + * on the table's parent catalog and be an owner of the table's parent schema. 3. have the + * following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the + * table's parent schema - **SELECT** privilege on the table. + * + *

The returned information includes configuration values, as well as information on assets + * created by the monitor. Some information (e.g., dashboard) may be filtered out if the caller is + * in a different workspace than where the monitor was created. + */ + MonitorInfo get(GetLakehouseMonitorRequest getLakehouseMonitorRequest); + + /** + * Update a table monitor. + * + *

Updates a monitor for the specified table. + * + *

The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** + * on the table's parent catalog and be an owner of the table's parent schema 3. have the + * following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the + * table's parent schema - be an owner of the table. + * + *

Additionally, the call must be made from the workspace where the monitor was created, and + * the caller must be the original creator of the monitor. + * + *

Certain configuration fields, such as output asset identifiers, cannot be updated. + */ + MonitorInfo update(UpdateMonitor updateMonitor); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorCronSchedule.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorCronSchedule.java new file mode 100755 index 000000000..1c355bd46 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorCronSchedule.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class MonitorCronSchedule { + /** Whether the schedule is paused or not */ + @JsonProperty("pause_status") + private MonitorCronSchedulePauseStatus pauseStatus; + + /** A cron expression using quartz syntax that describes the schedule for a job. */ + @JsonProperty("quartz_cron_expression") + private String quartzCronExpression; + + /** A Java timezone id. The schedule for a job will be resolved with respect to this timezone. 
*/ + @JsonProperty("timezone_id") + private String timezoneId; + + public MonitorCronSchedule setPauseStatus(MonitorCronSchedulePauseStatus pauseStatus) { + this.pauseStatus = pauseStatus; + return this; + } + + public MonitorCronSchedulePauseStatus getPauseStatus() { + return pauseStatus; + } + + public MonitorCronSchedule setQuartzCronExpression(String quartzCronExpression) { + this.quartzCronExpression = quartzCronExpression; + return this; + } + + public String getQuartzCronExpression() { + return quartzCronExpression; + } + + public MonitorCronSchedule setTimezoneId(String timezoneId) { + this.timezoneId = timezoneId; + return this; + } + + public String getTimezoneId() { + return timezoneId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + MonitorCronSchedule that = (MonitorCronSchedule) o; + return Objects.equals(pauseStatus, that.pauseStatus) + && Objects.equals(quartzCronExpression, that.quartzCronExpression) + && Objects.equals(timezoneId, that.timezoneId); + } + + @Override + public int hashCode() { + return Objects.hash(pauseStatus, quartzCronExpression, timezoneId); + } + + @Override + public String toString() { + return new ToStringer(MonitorCronSchedule.class) + .add("pauseStatus", pauseStatus) + .add("quartzCronExpression", quartzCronExpression) + .add("timezoneId", timezoneId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorCronSchedulePauseStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorCronSchedulePauseStatus.java new file mode 100755 index 000000000..5b59f5385 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorCronSchedulePauseStatus.java @@ -0,0 +1,12 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; + +/** Whether the schedule is paused or not */ +@Generated +public enum MonitorCronSchedulePauseStatus { + PAUSED, + UNPAUSED, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorCustomMetric.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorCustomMetric.java new file mode 100755 index 000000000..2167187ba --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorCustomMetric.java @@ -0,0 +1,111 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class MonitorCustomMetric { + /** + * Jinja template for a SQL expression that specifies how to compute the metric. See [create + * metric definition]. + * + *

[create metric definition]: + * https://docs.databricks.com/en/lakehouse-monitoring/custom-metrics.html#create-definition + */ + @JsonProperty("definition") + private String definition; + + /** Columns on the monitored table to apply the custom metrics to. */ + @JsonProperty("input_columns") + private Collection inputColumns; + + /** Name of the custom metric. */ + @JsonProperty("name") + private String name; + + /** The output type of the custom metric. */ + @JsonProperty("output_data_type") + private String outputDataType; + + /** The type of the custom metric. */ + @JsonProperty("type") + private MonitorCustomMetricType typeValue; + + public MonitorCustomMetric setDefinition(String definition) { + this.definition = definition; + return this; + } + + public String getDefinition() { + return definition; + } + + public MonitorCustomMetric setInputColumns(Collection inputColumns) { + this.inputColumns = inputColumns; + return this; + } + + public Collection getInputColumns() { + return inputColumns; + } + + public MonitorCustomMetric setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public MonitorCustomMetric setOutputDataType(String outputDataType) { + this.outputDataType = outputDataType; + return this; + } + + public String getOutputDataType() { + return outputDataType; + } + + public MonitorCustomMetric setType(MonitorCustomMetricType typeValue) { + this.typeValue = typeValue; + return this; + } + + public MonitorCustomMetricType getType() { + return typeValue; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + MonitorCustomMetric that = (MonitorCustomMetric) o; + return Objects.equals(definition, that.definition) + && Objects.equals(inputColumns, that.inputColumns) + && Objects.equals(name, that.name) + && Objects.equals(outputDataType, that.outputDataType) + && Objects.equals(typeValue, 
that.typeValue); + } + + @Override + public int hashCode() { + return Objects.hash(definition, inputColumns, name, outputDataType, typeValue); + } + + @Override + public String toString() { + return new ToStringer(MonitorCustomMetric.class) + .add("definition", definition) + .add("inputColumns", inputColumns) + .add("name", name) + .add("outputDataType", outputDataType) + .add("typeValue", typeValue) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorCustomMetricType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorCustomMetricType.java new file mode 100755 index 000000000..391515d79 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorCustomMetricType.java @@ -0,0 +1,15 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; + +/** The type of the custom metric. */ +@Generated +public enum MonitorCustomMetricType { + CUSTOM_METRIC_TYPE_AGGREGATE, + CUSTOM_METRIC_TYPE_DERIVED, + CUSTOM_METRIC_TYPE_DRIFT, + MONITOR_STATUS_ERROR, + MONITOR_STATUS_FAILED, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorDataClassificationConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorDataClassificationConfig.java new file mode 100755 index 000000000..814e2b9ce --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorDataClassificationConfig.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class MonitorDataClassificationConfig { + /** Whether data classification is enabled. */ + @JsonProperty("enabled") + private Boolean enabled; + + public MonitorDataClassificationConfig setEnabled(Boolean enabled) { + this.enabled = enabled; + return this; + } + + public Boolean getEnabled() { + return enabled; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + MonitorDataClassificationConfig that = (MonitorDataClassificationConfig) o; + return Objects.equals(enabled, that.enabled); + } + + @Override + public int hashCode() { + return Objects.hash(enabled); + } + + @Override + public String toString() { + return new ToStringer(MonitorDataClassificationConfig.class).add("enabled", enabled).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorDestinations.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorDestinations.java new file mode 100755 index 000000000..1354e0944 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorDestinations.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class MonitorDestinations { + /** The list of email addresses to send the notification to. 
*/ + @JsonProperty("email_addresses") + private Collection emailAddresses; + + public MonitorDestinations setEmailAddresses(Collection emailAddresses) { + this.emailAddresses = emailAddresses; + return this; + } + + public Collection getEmailAddresses() { + return emailAddresses; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + MonitorDestinations that = (MonitorDestinations) o; + return Objects.equals(emailAddresses, that.emailAddresses); + } + + @Override + public int hashCode() { + return Objects.hash(emailAddresses); + } + + @Override + public String toString() { + return new ToStringer(MonitorDestinations.class) + .add("emailAddresses", emailAddresses) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInferenceLogProfileType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInferenceLogProfileType.java new file mode 100755 index 000000000..875aa0a3d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInferenceLogProfileType.java @@ -0,0 +1,145 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class MonitorInferenceLogProfileType { + /** + * List of granularities to use when aggregating data into time windows based on their timestamp. + */ + @JsonProperty("granularities") + private Collection granularities; + + /** Column of the model label. */ + @JsonProperty("label_col") + private String labelCol; + + /** Column of the model id or version. 
*/ + @JsonProperty("model_id_col") + private String modelIdCol; + + /** Column of the model prediction. */ + @JsonProperty("prediction_col") + private String predictionCol; + + /** Column of the model prediction probabilities. */ + @JsonProperty("prediction_proba_col") + private String predictionProbaCol; + + /** Problem type the model aims to solve. */ + @JsonProperty("problem_type") + private MonitorInferenceLogProfileTypeProblemType problemType; + + /** Column of the timestamp of predictions. */ + @JsonProperty("timestamp_col") + private String timestampCol; + + public MonitorInferenceLogProfileType setGranularities(Collection granularities) { + this.granularities = granularities; + return this; + } + + public Collection getGranularities() { + return granularities; + } + + public MonitorInferenceLogProfileType setLabelCol(String labelCol) { + this.labelCol = labelCol; + return this; + } + + public String getLabelCol() { + return labelCol; + } + + public MonitorInferenceLogProfileType setModelIdCol(String modelIdCol) { + this.modelIdCol = modelIdCol; + return this; + } + + public String getModelIdCol() { + return modelIdCol; + } + + public MonitorInferenceLogProfileType setPredictionCol(String predictionCol) { + this.predictionCol = predictionCol; + return this; + } + + public String getPredictionCol() { + return predictionCol; + } + + public MonitorInferenceLogProfileType setPredictionProbaCol(String predictionProbaCol) { + this.predictionProbaCol = predictionProbaCol; + return this; + } + + public String getPredictionProbaCol() { + return predictionProbaCol; + } + + public MonitorInferenceLogProfileType setProblemType( + MonitorInferenceLogProfileTypeProblemType problemType) { + this.problemType = problemType; + return this; + } + + public MonitorInferenceLogProfileTypeProblemType getProblemType() { + return problemType; + } + + public MonitorInferenceLogProfileType setTimestampCol(String timestampCol) { + this.timestampCol = timestampCol; + return this; + } + 
+ public String getTimestampCol() { + return timestampCol; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + MonitorInferenceLogProfileType that = (MonitorInferenceLogProfileType) o; + return Objects.equals(granularities, that.granularities) + && Objects.equals(labelCol, that.labelCol) + && Objects.equals(modelIdCol, that.modelIdCol) + && Objects.equals(predictionCol, that.predictionCol) + && Objects.equals(predictionProbaCol, that.predictionProbaCol) + && Objects.equals(problemType, that.problemType) + && Objects.equals(timestampCol, that.timestampCol); + } + + @Override + public int hashCode() { + return Objects.hash( + granularities, + labelCol, + modelIdCol, + predictionCol, + predictionProbaCol, + problemType, + timestampCol); + } + + @Override + public String toString() { + return new ToStringer(MonitorInferenceLogProfileType.class) + .add("granularities", granularities) + .add("labelCol", labelCol) + .add("modelIdCol", modelIdCol) + .add("predictionCol", predictionCol) + .add("predictionProbaCol", predictionProbaCol) + .add("problemType", problemType) + .add("timestampCol", timestampCol) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInferenceLogProfileTypeProblemType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInferenceLogProfileTypeProblemType.java new file mode 100755 index 000000000..11a36da5e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInferenceLogProfileTypeProblemType.java @@ -0,0 +1,12 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; + +/** Problem type the model aims to solve. 
*/ +@Generated +public enum MonitorInferenceLogProfileTypeProblemType { + PROBLEM_TYPE_CLASSIFICATION, + PROBLEM_TYPE_REGRESSION, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInfo.java new file mode 100755 index 000000000..115a90007 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInfo.java @@ -0,0 +1,339 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class MonitorInfo { + /** The directory to store monitoring assets (e.g. dashboard, metric tables). */ + @JsonProperty("assets_dir") + private String assetsDir; + + /** + * Name of the baseline table from which drift metrics are computed from. Columns in the monitored + * table should also be present in the baseline table. + */ + @JsonProperty("baseline_table_name") + private String baselineTableName; + + /** + * Custom metrics to compute on the monitored table. These can be aggregate metrics, derived + * metrics (from already computed aggregate metrics), or drift metrics (comparing metrics across + * time windows). + */ + @JsonProperty("custom_metrics") + private Collection customMetrics; + + /** The ID of the generated dashboard. */ + @JsonProperty("dashboard_id") + private String dashboardId; + + /** The data classification config for the monitor. */ + @JsonProperty("data_classification_config") + private MonitorDataClassificationConfig dataClassificationConfig; + + /** + * The full name of the drift metrics table. Format: + * __catalog_name__.__schema_name__.__table_name__. 
+ */ + @JsonProperty("drift_metrics_table_name") + private String driftMetricsTableName; + + /** Configuration for monitoring inference logs. */ + @JsonProperty("inference_log") + private MonitorInferenceLogProfileType inferenceLog; + + /** The latest failure message of the monitor (if any). */ + @JsonProperty("latest_monitor_failure_msg") + private String latestMonitorFailureMsg; + + /** The version of the monitor config (e.g. 1,2,3). If negative, the monitor may be corrupted. */ + @JsonProperty("monitor_version") + private String monitorVersion; + + /** The notification settings for the monitor. */ + @JsonProperty("notifications") + private Collection notifications; + + /** Schema where output metric tables are created. */ + @JsonProperty("output_schema_name") + private String outputSchemaName; + + /** + * The full name of the profile metrics table. Format: + * __catalog_name__.__schema_name__.__table_name__. + */ + @JsonProperty("profile_metrics_table_name") + private String profileMetricsTableName; + + /** The schedule for automatically updating and refreshing metric tables. */ + @JsonProperty("schedule") + private MonitorCronSchedule schedule; + + /** + * List of column expressions to slice data with for targeted analysis. The data is grouped by + * each expression independently, resulting in a separate slice for each predicate and its + * complements. For high-cardinality columns, only the top 100 unique values by frequency will + * generate slices. + */ + @JsonProperty("slicing_exprs") + private Collection slicingExprs; + + /** Configuration for monitoring snapshot tables. */ + @JsonProperty("snapshot") + private Object snapshot; + + /** The status of the monitor. */ + @JsonProperty("status") + private MonitorInfoStatus status; + + /** + * The full name of the table to monitor. Format: __catalog_name__.__schema_name__.__table_name__. + */ + @JsonProperty("table_name") + private String tableName; + + /** Configuration for monitoring time series tables. 
*/ + @JsonProperty("time_series") + private MonitorTimeSeriesProfileType timeSeries; + + public MonitorInfo setAssetsDir(String assetsDir) { + this.assetsDir = assetsDir; + return this; + } + + public String getAssetsDir() { + return assetsDir; + } + + public MonitorInfo setBaselineTableName(String baselineTableName) { + this.baselineTableName = baselineTableName; + return this; + } + + public String getBaselineTableName() { + return baselineTableName; + } + + public MonitorInfo setCustomMetrics(Collection customMetrics) { + this.customMetrics = customMetrics; + return this; + } + + public Collection getCustomMetrics() { + return customMetrics; + } + + public MonitorInfo setDashboardId(String dashboardId) { + this.dashboardId = dashboardId; + return this; + } + + public String getDashboardId() { + return dashboardId; + } + + public MonitorInfo setDataClassificationConfig( + MonitorDataClassificationConfig dataClassificationConfig) { + this.dataClassificationConfig = dataClassificationConfig; + return this; + } + + public MonitorDataClassificationConfig getDataClassificationConfig() { + return dataClassificationConfig; + } + + public MonitorInfo setDriftMetricsTableName(String driftMetricsTableName) { + this.driftMetricsTableName = driftMetricsTableName; + return this; + } + + public String getDriftMetricsTableName() { + return driftMetricsTableName; + } + + public MonitorInfo setInferenceLog(MonitorInferenceLogProfileType inferenceLog) { + this.inferenceLog = inferenceLog; + return this; + } + + public MonitorInferenceLogProfileType getInferenceLog() { + return inferenceLog; + } + + public MonitorInfo setLatestMonitorFailureMsg(String latestMonitorFailureMsg) { + this.latestMonitorFailureMsg = latestMonitorFailureMsg; + return this; + } + + public String getLatestMonitorFailureMsg() { + return latestMonitorFailureMsg; + } + + public MonitorInfo setMonitorVersion(String monitorVersion) { + this.monitorVersion = monitorVersion; + return this; + } + + public String 
getMonitorVersion() { + return monitorVersion; + } + + public MonitorInfo setNotifications(Collection notifications) { + this.notifications = notifications; + return this; + } + + public Collection getNotifications() { + return notifications; + } + + public MonitorInfo setOutputSchemaName(String outputSchemaName) { + this.outputSchemaName = outputSchemaName; + return this; + } + + public String getOutputSchemaName() { + return outputSchemaName; + } + + public MonitorInfo setProfileMetricsTableName(String profileMetricsTableName) { + this.profileMetricsTableName = profileMetricsTableName; + return this; + } + + public String getProfileMetricsTableName() { + return profileMetricsTableName; + } + + public MonitorInfo setSchedule(MonitorCronSchedule schedule) { + this.schedule = schedule; + return this; + } + + public MonitorCronSchedule getSchedule() { + return schedule; + } + + public MonitorInfo setSlicingExprs(Collection slicingExprs) { + this.slicingExprs = slicingExprs; + return this; + } + + public Collection getSlicingExprs() { + return slicingExprs; + } + + public MonitorInfo setSnapshot(Object snapshot) { + this.snapshot = snapshot; + return this; + } + + public Object getSnapshot() { + return snapshot; + } + + public MonitorInfo setStatus(MonitorInfoStatus status) { + this.status = status; + return this; + } + + public MonitorInfoStatus getStatus() { + return status; + } + + public MonitorInfo setTableName(String tableName) { + this.tableName = tableName; + return this; + } + + public String getTableName() { + return tableName; + } + + public MonitorInfo setTimeSeries(MonitorTimeSeriesProfileType timeSeries) { + this.timeSeries = timeSeries; + return this; + } + + public MonitorTimeSeriesProfileType getTimeSeries() { + return timeSeries; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + MonitorInfo that = (MonitorInfo) o; + return Objects.equals(assetsDir, 
that.assetsDir) + && Objects.equals(baselineTableName, that.baselineTableName) + && Objects.equals(customMetrics, that.customMetrics) + && Objects.equals(dashboardId, that.dashboardId) + && Objects.equals(dataClassificationConfig, that.dataClassificationConfig) + && Objects.equals(driftMetricsTableName, that.driftMetricsTableName) + && Objects.equals(inferenceLog, that.inferenceLog) + && Objects.equals(latestMonitorFailureMsg, that.latestMonitorFailureMsg) + && Objects.equals(monitorVersion, that.monitorVersion) + && Objects.equals(notifications, that.notifications) + && Objects.equals(outputSchemaName, that.outputSchemaName) + && Objects.equals(profileMetricsTableName, that.profileMetricsTableName) + && Objects.equals(schedule, that.schedule) + && Objects.equals(slicingExprs, that.slicingExprs) + && Objects.equals(snapshot, that.snapshot) + && Objects.equals(status, that.status) + && Objects.equals(tableName, that.tableName) + && Objects.equals(timeSeries, that.timeSeries); + } + + @Override + public int hashCode() { + return Objects.hash( + assetsDir, + baselineTableName, + customMetrics, + dashboardId, + dataClassificationConfig, + driftMetricsTableName, + inferenceLog, + latestMonitorFailureMsg, + monitorVersion, + notifications, + outputSchemaName, + profileMetricsTableName, + schedule, + slicingExprs, + snapshot, + status, + tableName, + timeSeries); + } + + @Override + public String toString() { + return new ToStringer(MonitorInfo.class) + .add("assetsDir", assetsDir) + .add("baselineTableName", baselineTableName) + .add("customMetrics", customMetrics) + .add("dashboardId", dashboardId) + .add("dataClassificationConfig", dataClassificationConfig) + .add("driftMetricsTableName", driftMetricsTableName) + .add("inferenceLog", inferenceLog) + .add("latestMonitorFailureMsg", latestMonitorFailureMsg) + .add("monitorVersion", monitorVersion) + .add("notifications", notifications) + .add("outputSchemaName", outputSchemaName) + .add("profileMetricsTableName", 
profileMetricsTableName) + .add("schedule", schedule) + .add("slicingExprs", slicingExprs) + .add("snapshot", snapshot) + .add("status", status) + .add("tableName", tableName) + .add("timeSeries", timeSeries) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInfoStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInfoStatus.java new file mode 100755 index 000000000..51a8f409c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInfoStatus.java @@ -0,0 +1,15 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; + +/** The status of the monitor. */ +@Generated +public enum MonitorInfoStatus { + MONITOR_STATUS_ACTIVE, + MONITOR_STATUS_DELETE_PENDING, + MONITOR_STATUS_ERROR, + MONITOR_STATUS_FAILED, + MONITOR_STATUS_PENDING, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorNotificationsConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorNotificationsConfig.java new file mode 100755 index 000000000..68666f166 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorNotificationsConfig.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class MonitorNotificationsConfig { + /** Who to send notifications to on monitor failure. 
*/ + @JsonProperty("on_failure") + private MonitorDestinations onFailure; + + public MonitorNotificationsConfig setOnFailure(MonitorDestinations onFailure) { + this.onFailure = onFailure; + return this; + } + + public MonitorDestinations getOnFailure() { + return onFailure; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + MonitorNotificationsConfig that = (MonitorNotificationsConfig) o; + return Objects.equals(onFailure, that.onFailure); + } + + @Override + public int hashCode() { + return Objects.hash(onFailure); + } + + @Override + public String toString() { + return new ToStringer(MonitorNotificationsConfig.class).add("onFailure", onFailure).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorTimeSeriesProfileType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorTimeSeriesProfileType.java new file mode 100755 index 000000000..ee757bd08 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorTimeSeriesProfileType.java @@ -0,0 +1,65 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class MonitorTimeSeriesProfileType { + /** + * List of granularities to use when aggregating data into time windows based on their timestamp. + */ + @JsonProperty("granularities") + private Collection granularities; + + /** + * The timestamp column. This must be timestamp types or convertible to timestamp types using the + * pyspark to_timestamp function. 
+ */ + @JsonProperty("timestamp_col") + private String timestampCol; + + public MonitorTimeSeriesProfileType setGranularities(Collection granularities) { + this.granularities = granularities; + return this; + } + + public Collection getGranularities() { + return granularities; + } + + public MonitorTimeSeriesProfileType setTimestampCol(String timestampCol) { + this.timestampCol = timestampCol; + return this; + } + + public String getTimestampCol() { + return timestampCol; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + MonitorTimeSeriesProfileType that = (MonitorTimeSeriesProfileType) o; + return Objects.equals(granularities, that.granularities) + && Objects.equals(timestampCol, that.timestampCol); + } + + @Override + public int hashCode() { + return Objects.hash(granularities, timestampCol); + } + + @Override + public String toString() { + return new ToStringer(MonitorTimeSeriesProfileType.class) + .add("granularities", granularities) + .add("timestampCol", timestampCol) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableExistsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableExistsResponse.java new file mode 100755 index 000000000..98eb5fb59 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableExistsResponse.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class TableExistsResponse { + /** Whether the table exists or not. 
*/ + @JsonProperty("table_exists") + private Boolean tableExists; + + public TableExistsResponse setTableExists(Boolean tableExists) { + this.tableExists = tableExists; + return this; + } + + public Boolean getTableExists() { + return tableExists; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TableExistsResponse that = (TableExistsResponse) o; + return Objects.equals(tableExists, that.tableExists); + } + + @Override + public int hashCode() { + return Objects.hash(tableExists); + } + + @Override + public String toString() { + return new ToStringer(TableExistsResponse.class).add("tableExists", tableExists).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesAPI.java index 3b65c8ab1..ab00a021b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesAPI.java @@ -49,6 +49,25 @@ public void delete(DeleteTableRequest request) { impl.delete(request); } + public TableExistsResponse exists(String fullName) { + return exists(new ExistsRequest().setFullName(fullName)); + } + + /** + * Get boolean reflecting if table exists. + * + *

Gets if a table exists in the metastore for a specific catalog and schema. The caller must + * satisfy one of the following requirements: * Be a metastore admin * Be the owner of the parent + * catalog * Be the owner of the parent schema and have the USE_CATALOG privilege on the parent + * catalog * Have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** + * privilege on the parent schema, and either be the table owner or have the SELECT privilege on + * the table. * Have BROWSE privilege on the parent catalog * Have BROWSE privilege on the parent + * schema. + */ + public TableExistsResponse exists(ExistsRequest request) { + return impl.exists(request); + } + public TableInfo get(String fullName) { return get(new GetTableRequest().setFullName(fullName)); } @@ -56,10 +75,11 @@ public TableInfo get(String fullName) { /** * Get a table. * - *

Gets a table from the metastore for a specific catalog and schema. The caller must be a - * metastore admin, be the owner of the table and have the **USE_CATALOG** privilege on the parent - * catalog and the **USE_SCHEMA** privilege on the parent schema, or be the owner of the table and - * have the **SELECT** privilege on it as well. + *

Gets a table from the metastore for a specific catalog and schema. The caller must satisfy + * one of the following requirements: * Be a metastore admin * Be the owner of the parent catalog + * * Be the owner of the parent schema and have the USE_CATALOG privilege on the parent catalog * + * Have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on + * the parent schema, and either be the table owner or have the SELECT privilege on the table. */ public TableInfo get(GetTableRequest request) { return impl.get(request); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesImpl.java index 53b043863..3da035105 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesImpl.java @@ -23,6 +23,14 @@ public void delete(DeleteTableRequest request) { apiClient.DELETE(path, request, Void.class, headers); } + @Override + public TableExistsResponse exists(ExistsRequest request) { + String path = String.format("/api/2.1/unity-catalog/tables/%s/exists", request.getFullName()); + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + return apiClient.GET(path, request, TableExistsResponse.class, headers); + } + @Override public TableInfo get(GetTableRequest request) { String path = String.format("/api/2.1/unity-catalog/tables/%s", request.getFullName()); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesService.java index c5c016f49..54c506478 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesService.java @@ -29,13 +29,27 @@ public 
interface TablesService { */ void delete(DeleteTableRequest deleteTableRequest); + /** + * Get boolean reflecting if table exists. + * + *

Gets if a table exists in the metastore for a specific catalog and schema. The caller must + * satisfy one of the following requirements: * Be a metastore admin * Be the owner of the parent + * catalog * Be the owner of the parent schema and have the USE_CATALOG privilege on the parent + * catalog * Have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** + * privilege on the parent schema, and either be the table owner or have the SELECT privilege on + * the table. * Have BROWSE privilege on the parent catalog * Have BROWSE privilege on the parent + * schema. + */ + TableExistsResponse exists(ExistsRequest existsRequest); + /** * Get a table. * - *

Gets a table from the metastore for a specific catalog and schema. The caller must be a - * metastore admin, be the owner of the table and have the **USE_CATALOG** privilege on the parent - * catalog and the **USE_SCHEMA** privilege on the parent schema, or be the owner of the table and - * have the **SELECT** privilege on it as well. + *

Gets a table from the metastore for a specific catalog and schema. The caller must satisfy + * one of the following requirements: * Be a metastore admin * Be the owner of the parent catalog + * * Be the owner of the parent schema and have the USE_CATALOG privilege on the parent catalog * + * Have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on + * the parent schema, and either be the table owner or have the SELECT privilege on the table. */ TableInfo get(GetTableRequest getTableRequest); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMonitor.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMonitor.java new file mode 100755 index 000000000..e4475bb2e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMonitor.java @@ -0,0 +1,234 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class UpdateMonitor { + /** The directory to store monitoring assets (e.g. dashboard, metric tables). */ + @JsonProperty("assets_dir") + private String assetsDir; + + /** + * Name of the baseline table from which drift metrics are computed from. Columns in the monitored + * table should also be present in the baseline table. + */ + @JsonProperty("baseline_table_name") + private String baselineTableName; + + /** + * Custom metrics to compute on the monitored table. These can be aggregate metrics, derived + * metrics (from already computed aggregate metrics), or drift metrics (comparing metrics across + * time windows). 
+ */ + @JsonProperty("custom_metrics") + private Collection customMetrics; + + /** The data classification config for the monitor. */ + @JsonProperty("data_classification_config") + private MonitorDataClassificationConfig dataClassificationConfig; + + /** Full name of the table. */ + private String fullName; + + /** Configuration for monitoring inference logs. */ + @JsonProperty("inference_log") + private MonitorInferenceLogProfileType inferenceLog; + + /** The notification settings for the monitor. */ + @JsonProperty("notifications") + private Collection notifications; + + /** Schema where output metric tables are created. */ + @JsonProperty("output_schema_name") + private String outputSchemaName; + + /** The schedule for automatically updating and refreshing metric tables. */ + @JsonProperty("schedule") + private MonitorCronSchedule schedule; + + /** + * List of column expressions to slice data with for targeted analysis. The data is grouped by + * each expression independently, resulting in a separate slice for each predicate and its + * complements. For high-cardinality columns, only the top 100 unique values by frequency will + * generate slices. + */ + @JsonProperty("slicing_exprs") + private Collection slicingExprs; + + /** Configuration for monitoring snapshot tables. */ + @JsonProperty("snapshot") + private Object snapshot; + + /** Configuration for monitoring time series tables. 
*/ + @JsonProperty("time_series") + private MonitorTimeSeriesProfileType timeSeries; + + public UpdateMonitor setAssetsDir(String assetsDir) { + this.assetsDir = assetsDir; + return this; + } + + public String getAssetsDir() { + return assetsDir; + } + + public UpdateMonitor setBaselineTableName(String baselineTableName) { + this.baselineTableName = baselineTableName; + return this; + } + + public String getBaselineTableName() { + return baselineTableName; + } + + public UpdateMonitor setCustomMetrics(Collection customMetrics) { + this.customMetrics = customMetrics; + return this; + } + + public Collection getCustomMetrics() { + return customMetrics; + } + + public UpdateMonitor setDataClassificationConfig( + MonitorDataClassificationConfig dataClassificationConfig) { + this.dataClassificationConfig = dataClassificationConfig; + return this; + } + + public MonitorDataClassificationConfig getDataClassificationConfig() { + return dataClassificationConfig; + } + + public UpdateMonitor setFullName(String fullName) { + this.fullName = fullName; + return this; + } + + public String getFullName() { + return fullName; + } + + public UpdateMonitor setInferenceLog(MonitorInferenceLogProfileType inferenceLog) { + this.inferenceLog = inferenceLog; + return this; + } + + public MonitorInferenceLogProfileType getInferenceLog() { + return inferenceLog; + } + + public UpdateMonitor setNotifications(Collection notifications) { + this.notifications = notifications; + return this; + } + + public Collection getNotifications() { + return notifications; + } + + public UpdateMonitor setOutputSchemaName(String outputSchemaName) { + this.outputSchemaName = outputSchemaName; + return this; + } + + public String getOutputSchemaName() { + return outputSchemaName; + } + + public UpdateMonitor setSchedule(MonitorCronSchedule schedule) { + this.schedule = schedule; + return this; + } + + public MonitorCronSchedule getSchedule() { + return schedule; + } + + public UpdateMonitor 
setSlicingExprs(Collection slicingExprs) { + this.slicingExprs = slicingExprs; + return this; + } + + public Collection getSlicingExprs() { + return slicingExprs; + } + + public UpdateMonitor setSnapshot(Object snapshot) { + this.snapshot = snapshot; + return this; + } + + public Object getSnapshot() { + return snapshot; + } + + public UpdateMonitor setTimeSeries(MonitorTimeSeriesProfileType timeSeries) { + this.timeSeries = timeSeries; + return this; + } + + public MonitorTimeSeriesProfileType getTimeSeries() { + return timeSeries; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateMonitor that = (UpdateMonitor) o; + return Objects.equals(assetsDir, that.assetsDir) + && Objects.equals(baselineTableName, that.baselineTableName) + && Objects.equals(customMetrics, that.customMetrics) + && Objects.equals(dataClassificationConfig, that.dataClassificationConfig) + && Objects.equals(fullName, that.fullName) + && Objects.equals(inferenceLog, that.inferenceLog) + && Objects.equals(notifications, that.notifications) + && Objects.equals(outputSchemaName, that.outputSchemaName) + && Objects.equals(schedule, that.schedule) + && Objects.equals(slicingExprs, that.slicingExprs) + && Objects.equals(snapshot, that.snapshot) + && Objects.equals(timeSeries, that.timeSeries); + } + + @Override + public int hashCode() { + return Objects.hash( + assetsDir, + baselineTableName, + customMetrics, + dataClassificationConfig, + fullName, + inferenceLog, + notifications, + outputSchemaName, + schedule, + slicingExprs, + snapshot, + timeSeries); + } + + @Override + public String toString() { + return new ToStringer(UpdateMonitor.class) + .add("assetsDir", assetsDir) + .add("baselineTableName", baselineTableName) + .add("customMetrics", customMetrics) + .add("dataClassificationConfig", dataClassificationConfig) + .add("fullName", fullName) + .add("inferenceLog", inferenceLog) + 
.add("notifications", notifications) + .add("outputSchemaName", outputSchemaName) + .add("schedule", schedule) + .add("slicingExprs", slicingExprs) + .add("snapshot", snapshot) + .add("timeSeries", timeSeries) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAccessControlRequest.java index 934047642..c16994224 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAccessControlRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAccessControlRequest.java @@ -17,10 +17,7 @@ public class ClusterAccessControlRequest { @JsonProperty("permission_level") private ClusterPermissionLevel permissionLevel; - /** - * Application ID of an active service principal. Setting this field requires the - * `servicePrincipal/user` role. - */ + /** application ID of a service principal */ @JsonProperty("service_principal_name") private String servicePrincipalName; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoliciesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoliciesAPI.java index c6323dd21..ff89273c2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoliciesAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoliciesAPI.java @@ -12,10 +12,10 @@ * creation. Cluster policies have ACLs that limit their use to specific users and groups. * *

With cluster policies, you can: - Auto-install cluster libraries on the next restart by - * listing them in the policy's "libraries" field. - Limit users to creating clusters with the - * prescribed settings. - Simplify the user interface, enabling more users to create clusters, by - * fixing and hiding some fields. - Manage costs by setting limits on attributes that impact the - * hourly rate. + * listing them in the policy's "libraries" field (Public Preview). - Limit users to creating + * clusters with the prescribed settings. - Simplify the user interface, enabling more users to + * create clusters, by fixing and hiding some fields. - Manage costs by setting limits on attributes + * that impact the hourly rate. * *

Cluster policy permissions limit which policies a user can select in the Policy drop-down when * the user creates a cluster: - A user who has unrestricted cluster create permission can select diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoliciesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoliciesService.java index 5cb83f298..10654b89f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoliciesService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoliciesService.java @@ -9,10 +9,10 @@ * creation. Cluster policies have ACLs that limit their use to specific users and groups. * *

With cluster policies, you can: - Auto-install cluster libraries on the next restart by - * listing them in the policy's "libraries" field. - Limit users to creating clusters with the - * prescribed settings. - Simplify the user interface, enabling more users to create clusters, by - * fixing and hiding some fields. - Manage costs by setting limits on attributes that impact the - * hourly rate. + * listing them in the policy's "libraries" field (Public Preview). - Limit users to creating + * clusters with the prescribed settings. - Simplify the user interface, enabling more users to + * create clusters, by fixing and hiding some fields. - Manage costs by setting limits on attributes + * that impact the hourly rate. * *

Cluster policy permissions limit which policies a user can select in the Policy drop-down when * the user creates a cluster: - A user who has unrestricted cluster create permission can select diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyAccessControlRequest.java index aef055912..651b6e1bb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyAccessControlRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPolicyAccessControlRequest.java @@ -17,10 +17,7 @@ public class ClusterPolicyAccessControlRequest { @JsonProperty("permission_level") private ClusterPolicyPermissionLevel permissionLevel; - /** - * Application ID of an active service principal. Setting this field requires the - * `servicePrincipal/user` role. - */ + /** application ID of a service principal */ @JsonProperty("service_principal_name") private String servicePrincipalName; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersAPI.java index 6670e979d..6bf0d8e66 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersAPI.java @@ -147,7 +147,9 @@ public void changeOwner(String clusterId, String ownerUsername) { /** * Change cluster owner. * - *

Change the owner of the cluster. You must be an admin to perform this operation. + *

Change the owner of the cluster. You must be an admin and the cluster must be terminated to + * perform this operation. The service principal application ID can be supplied as an argument to + * `owner_username`. */ public void changeOwner(ChangeClusterOwner request) { impl.changeOwner(request); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersService.java index c47c7d30e..33158349d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersService.java @@ -36,7 +36,9 @@ public interface ClustersService { /** * Change cluster owner. * - *

Change the owner of the cluster. You must be an admin to perform this operation. + *

Change the owner of the cluster. You must be an admin and the cluster must be terminated to + * perform this operation. The service principal application ID can be supplied as an argument to + * `owner_username`. */ void changeOwner(ChangeClusterOwner changeClusterOwner); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreatePolicy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreatePolicy.java index 8de3541cf..8b44ccd98 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreatePolicy.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreatePolicy.java @@ -23,7 +23,10 @@ public class CreatePolicy { @JsonProperty("description") private String description; - /** A list of libraries to be installed on the next cluster restart that uses this policy. */ + /** + * A list of libraries to be installed on the next cluster restart that uses this policy. The + * maximum number of libraries is 500. + */ @JsonProperty("libraries") private Collection libraries; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditPolicy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditPolicy.java index 6dd048db9..d81f9cf4d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditPolicy.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditPolicy.java @@ -23,7 +23,10 @@ public class EditPolicy { @JsonProperty("description") private String description; - /** A list of libraries to be installed on the next cluster restart that uses this policy. */ + /** + * A list of libraries to be installed on the next cluster restart that uses this policy. The + * maximum number of libraries is 500. 
+ */ @JsonProperty("libraries") private Collection libraries; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptsAPI.java index cdc685f4c..54d1829f3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptsAPI.java @@ -75,7 +75,7 @@ public GlobalInitScriptDetailsWithContent get(GetGlobalInitScriptRequest request * *

Get a list of all global init scripts for this workspace. This returns all properties for * each script but **not** the script contents. To retrieve the contents of a script, use the [get - * a global init script](#operation/get-script) operation. + * a global init script](:method:globalinitscripts/get) operation. */ public Iterable list() { return impl.list().getScripts(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptsService.java index b1130d7eb..2a8b7f429 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptsService.java @@ -44,7 +44,7 @@ public interface GlobalInitScriptsService { * *

Get a list of all global init scripts for this workspace. This returns all properties for * each script but **not** the script contents. To retrieve the contents of a script, use the [get - * a global init script](#operation/get-script) operation. + * a global init script](:method:globalinitscripts/get) operation. */ ListGlobalInitScriptsResponse list(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAccessControlRequest.java index 79c7449ad..94ea72be1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAccessControlRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAccessControlRequest.java @@ -17,10 +17,7 @@ public class InstancePoolAccessControlRequest { @JsonProperty("permission_level") private InstancePoolPermissionLevel permissionLevel; - /** - * Application ID of an active service principal. Setting this field requires the - * `servicePrincipal/user` role. - */ + /** application ID of a service principal */ @JsonProperty("service_principal_name") private String servicePrincipalName; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Policy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Policy.java index fff3fb236..de6167066 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Policy.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Policy.java @@ -41,7 +41,10 @@ public class Policy { @JsonProperty("is_default") private Boolean isDefault; - /** A list of libraries to be installed on the next cluster restart that uses this policy. */ + /** + * A list of libraries to be installed on the next cluster restart that uses this policy. The + * maximum number of libraries is 500. 
+ */ @JsonProperty("libraries") private Collection libraries; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DbfsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DbfsAPI.java index 4c6fed0a2..be15a4ff9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DbfsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DbfsAPI.java @@ -34,10 +34,10 @@ public void addBlock(long handle, String data) { * Append data block. * *

Appends a block of data to the stream specified by the input handle. If the handle does not - * exist, this call will throw an exception with `RESOURCE_DOES_NOT_EXIST`. + * exist, this call will throw an exception with ``RESOURCE_DOES_NOT_EXIST``. * *

If the block of data exceeds 1 MB, this call will throw an exception with - * `MAX_BLOCK_SIZE_EXCEEDED`. + * ``MAX_BLOCK_SIZE_EXCEEDED``. */ public void addBlock(AddBlock request) { impl.addBlock(request); @@ -51,7 +51,7 @@ public void close(long handle) { * Close the stream. * *

Closes the stream specified by the input handle. If the handle does not exist, this call - * throws an exception with `RESOURCE_DOES_NOT_EXIST`. + * throws an exception with ``RESOURCE_DOES_NOT_EXIST``. */ public void close(Close request) { impl.close(request); @@ -66,12 +66,13 @@ public CreateResponse create(String path) { * *

Opens a stream to write to a file and returns a handle to this stream. There is a 10 minute * idle timeout on this handle. If a file or directory already exists on the given path and - * __overwrite__ is set to `false`, this call throws an exception with `RESOURCE_ALREADY_EXISTS`. + * __overwrite__ is set to false, this call will throw an exception with + * ``RESOURCE_ALREADY_EXISTS``. * *

A typical workflow for file upload would be: * - *

1. Issue a `create` call and get a handle. 2. Issue one or more `add-block` calls with the - * handle you have. 3. Issue a `close` call with the handle you have. + *

1. Issue a ``create`` call and get a handle. 2. Issue one or more ``add-block`` calls with + * the handle you have. 3. Issue a ``close`` call with the handle you have. */ public CreateResponse create(Create request) { return impl.create(request); @@ -165,7 +166,7 @@ public void move(String sourcePath, String destinationPath) { *

Moves a file from one location to another location within DBFS. If the source file does not * exist, this call throws an exception with `RESOURCE_DOES_NOT_EXIST`. If a file already exists * in the destination path, this call throws an exception with `RESOURCE_ALREADY_EXISTS`. If the - * given source path is a directory, this call always recursively moves all files.", + * given source path is a directory, this call always recursively moves all files. */ public void move(Move request) { impl.move(request); @@ -206,7 +207,7 @@ public ReadResponse read(String path) { * read length exceeds 1 MB, this call throws an exception with `MAX_READ_SIZE_EXCEEDED`. * *

If `offset + length` exceeds the number of bytes in a file, it reads the contents until the - * end of file.", + * end of file. */ public ReadResponse read(ReadDbfsRequest request) { return impl.read(request); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DbfsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DbfsService.java index 36aace200..007d6eba3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DbfsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DbfsService.java @@ -17,10 +17,10 @@ public interface DbfsService { * Append data block. * *

Appends a block of data to the stream specified by the input handle. If the handle does not - * exist, this call will throw an exception with `RESOURCE_DOES_NOT_EXIST`. + * exist, this call will throw an exception with ``RESOURCE_DOES_NOT_EXIST``. * *

If the block of data exceeds 1 MB, this call will throw an exception with - * `MAX_BLOCK_SIZE_EXCEEDED`. + * ``MAX_BLOCK_SIZE_EXCEEDED``. */ void addBlock(AddBlock addBlock); @@ -28,7 +28,7 @@ public interface DbfsService { * Close the stream. * *

Closes the stream specified by the input handle. If the handle does not exist, this call - * throws an exception with `RESOURCE_DOES_NOT_EXIST`. + * throws an exception with ``RESOURCE_DOES_NOT_EXIST``. */ void close(Close close); @@ -37,12 +37,13 @@ public interface DbfsService { * *

Opens a stream to write to a file and returns a handle to this stream. There is a 10 minute * idle timeout on this handle. If a file or directory already exists on the given path and - * __overwrite__ is set to `false`, this call throws an exception with `RESOURCE_ALREADY_EXISTS`. + * __overwrite__ is set to false, this call will throw an exception with + * ``RESOURCE_ALREADY_EXISTS``. * *

A typical workflow for file upload would be: * - *

1. Issue a `create` call and get a handle. 2. Issue one or more `add-block` calls with the - * handle you have. 3. Issue a `close` call with the handle you have. + *

1. Issue a ``create`` call and get a handle. 2. Issue one or more ``add-block`` calls with + * the handle you have. 3. Issue a ``close`` call with the handle you have. */ CreateResponse create(Create create); @@ -106,7 +107,7 @@ public interface DbfsService { *

Moves a file from one location to another location within DBFS. If the source file does not * exist, this call throws an exception with `RESOURCE_DOES_NOT_EXIST`. If a file already exists * in the destination path, this call throws an exception with `RESOURCE_ALREADY_EXISTS`. If the - * given source path is a directory, this call always recursively moves all files.", + * given source path is a directory, this call always recursively moves all files. */ void move(Move move); @@ -135,7 +136,7 @@ public interface DbfsService { * read length exceeds 1 MB, this call throws an exception with `MAX_READ_SIZE_EXCEEDED`. * *

If `offset + length` exceeds the number of bytes in a file, it reads the contents until the - * end of file.", + * end of file. */ ReadResponse read(ReadDbfsRequest readDbfsRequest); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ReadResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ReadResponse.java index 3eec214ab..97a5b3dd0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ReadResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/ReadResponse.java @@ -10,7 +10,7 @@ @Generated public class ReadResponse { /** - * The number of bytes read (could be less than `length` if we hit end of file). This refers to + * The number of bytes read (could be less than ``length`` if we hit end of file). This refers to * number of bytes read in unencoded version (response data is base64-encoded). */ @JsonProperty("bytes_read") diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccessControlRequest.java index 7db3e07d0..2e96227dc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccessControlRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/AccessControlRequest.java @@ -17,10 +17,7 @@ public class AccessControlRequest { @JsonProperty("permission_level") private PermissionLevel permissionLevel; - /** - * Application ID of an active service principal. Setting this field requires the - * `servicePrincipal/user` role. 
- */ + /** application ID of a service principal */ @JsonProperty("service_principal_name") private String servicePrincipalName; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PasswordAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PasswordAccessControlRequest.java index c0508f67c..b9558a0c7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PasswordAccessControlRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PasswordAccessControlRequest.java @@ -17,10 +17,7 @@ public class PasswordAccessControlRequest { @JsonProperty("permission_level") private PasswordPermissionLevel permissionLevel; - /** - * Application ID of an active service principal. Setting this field requires the - * `servicePrincipal/user` role. - */ + /** application ID of a service principal */ @JsonProperty("service_principal_name") private String servicePrincipalName; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobAccessControlRequest.java index 3bce29c82..d2e6553d1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobAccessControlRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobAccessControlRequest.java @@ -17,10 +17,7 @@ public class JobAccessControlRequest { @JsonProperty("permission_level") private JobPermissionLevel permissionLevel; - /** - * Application ID of an active service principal. Setting this field requires the - * `servicePrincipal/user` role. 
- */ + /** application ID of a service principal */ @JsonProperty("service_principal_name") private String servicePrincipalName; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java index 486a5bfee..5c471fbe8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java @@ -35,8 +35,9 @@ public class SubmitTask { /** * If existing_cluster_id, the ID of an existing cluster that is used for all runs of this task. - * When running tasks on an existing cluster, you may need to manually restart the cluster if it - * stops responding. We suggest running jobs on new clusters for greater reliability. + * Only all-purpose clusters are supported. When running tasks on an existing cluster, you may + * need to manually restart the cluster if it stops responding. We suggest running jobs on new + * clusters for greater reliability. */ @JsonProperty("existing_cluster_id") private String existingClusterId; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java index b87453fd9..600c0e7a7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java @@ -54,8 +54,9 @@ public class Task { /** * If existing_cluster_id, the ID of an existing cluster that is used for all runs of this task. - * When running tasks on an existing cluster, you may need to manually restart the cluster if it - * stops responding. We suggest running jobs on new clusters for greater reliability. + * Only all-purpose clusters are supported. When running tasks on an existing cluster, you may + * need to manually restart the cluster if it stops responding. 
We suggest running jobs on new + * clusters for greater reliability. */ @JsonProperty("existing_cluster_id") private String existingClusterId; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentAccessControlRequest.java index 906deda04..250fa534d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentAccessControlRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentAccessControlRequest.java @@ -17,10 +17,7 @@ public class ExperimentAccessControlRequest { @JsonProperty("permission_level") private ExperimentPermissionLevel permissionLevel; - /** - * Application ID of an active service principal. Setting this field requires the - * `servicePrincipal/user` role. - */ + /** application ID of a service principal */ @JsonProperty("service_principal_name") private String servicePrincipalName; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java index 51e9342fb..112ab4bd8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java @@ -96,7 +96,9 @@ public DeleteRunsResponse deleteRuns(String experimentId, long maxTimestampMilli * Delete runs by creation time. * *

Bulk delete runs in an experiment that were created prior to or at the specified timestamp. - * Deletes at most max_runs per request. + * Deletes at most max_runs per request. To call this API from a Databricks Notebook in Python, + * you can use the client code snippet on + * https://learn.microsoft.com/en-us/azure/databricks/mlflow/runs#bulk-delete. */ public DeleteRunsResponse deleteRuns(DeleteRuns request) { return impl.deleteRuns(request); @@ -388,7 +390,9 @@ public RestoreRunsResponse restoreRuns(String experimentId, long minTimestampMil * Restore runs by deletion time. * *

Bulk restore runs in an experiment that were deleted no earlier than the specified - * timestamp. Restores at most max_runs per request. + * timestamp. Restores at most max_runs per request. To call this API from a Databricks Notebook + * in Python, you can use the client code snippet on + * https://learn.microsoft.com/en-us/azure/databricks/mlflow/runs#bulk-restore. */ public RestoreRunsResponse restoreRuns(RestoreRuns request) { return impl.restoreRuns(request); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java index 52d87067a..c09fe0503 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java @@ -57,7 +57,9 @@ public interface ExperimentsService { * Delete runs by creation time. * *

Bulk delete runs in an experiment that were created prior to or at the specified timestamp. - * Deletes at most max_runs per request. + * Deletes at most max_runs per request. To call this API from a Databricks Notebook in Python, + * you can use the client code snippet on + * https://learn.microsoft.com/en-us/azure/databricks/mlflow/runs#bulk-delete. */ DeleteRunsResponse deleteRuns(DeleteRuns deleteRuns); @@ -236,7 +238,9 @@ ExperimentPermissions getPermissions( * Restore runs by deletion time. * *

Bulk restore runs in an experiment that were deleted no earlier than the specified - * timestamp. Restores at most max_runs per request. + * timestamp. Restores at most max_runs per request. To call this API from a Databricks Notebook + * in Python, you can use the client code snippet on + * https://learn.microsoft.com/en-us/azure/databricks/mlflow/runs#bulk-restore. */ RestoreRunsResponse restoreRuns(RestoreRuns restoreRuns); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelAccessControlRequest.java index bc6ba6c45..ec6ad3f55 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelAccessControlRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/RegisteredModelAccessControlRequest.java @@ -17,10 +17,7 @@ public class RegisteredModelAccessControlRequest { @JsonProperty("permission_level") private RegisteredModelPermissionLevel permissionLevel; - /** - * Application ID of an active service principal. Setting this field requires the - * `servicePrincipal/user` role. 
- */ + /** application ID of a service principal */ @JsonProperty("service_principal_name") private String servicePrincipalName; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineAccessControlRequest.java index 1f421fb13..27b567277 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineAccessControlRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineAccessControlRequest.java @@ -17,10 +17,7 @@ public class PipelineAccessControlRequest { @JsonProperty("permission_level") private PipelinePermissionLevel permissionLevel; - /** - * Application ID of an active service principal. Setting this field requires the - * `servicePrincipal/user` role. - */ + /** application ID of a service principal */ @JsonProperty("service_principal_name") private String servicePrincipalName; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointAccessControlRequest.java index 63893df68..a1c66a4f3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointAccessControlRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointAccessControlRequest.java @@ -17,10 +17,7 @@ public class ServingEndpointAccessControlRequest { @JsonProperty("permission_level") private ServingEndpointPermissionLevel permissionLevel; - /** - * Application ID of an active service principal. Setting this field requires the - * `servicePrincipal/user` role. 
- */ + /** application ID of a service principal */ @JsonProperty("service_principal_name") private String servicePrincipalName; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateOboTokenRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateOboTokenRequest.java index 6f17a5acc..4ae7809b5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateOboTokenRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateOboTokenRequest.java @@ -7,6 +7,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Configuration details for creating on-behalf tokens. */ @Generated public class CreateOboTokenRequest { /** Application ID of the service principal. */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateOboTokenResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateOboTokenResponse.java index 8fd64ad96..2b9157a11 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateOboTokenResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateOboTokenResponse.java @@ -7,6 +7,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** An on-behalf token was successfully created for the service principal. */ @Generated public class CreateOboTokenResponse { /** */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetTokenResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetTokenResponse.java new file mode 100755 index 000000000..2d4a5d80e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetTokenResponse.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** Token with specified Token ID was successfully returned. */ +@Generated +public class GetTokenResponse { + /** */ + @JsonProperty("token_info") + private TokenInfo tokenInfo; + + public GetTokenResponse setTokenInfo(TokenInfo tokenInfo) { + this.tokenInfo = tokenInfo; + return this; + } + + public TokenInfo getTokenInfo() { + return tokenInfo; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetTokenResponse that = (GetTokenResponse) o; + return Objects.equals(tokenInfo, that.tokenInfo); + } + + @Override + public int hashCode() { + return Objects.hash(tokenInfo); + } + + @Override + public String toString() { + return new ToStringer(GetTokenResponse.class).add("tokenInfo", tokenInfo).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListTokenManagementRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListTokenManagementRequest.java index 5d62ed32e..06734ded6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListTokenManagementRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListTokenManagementRequest.java @@ -12,18 +12,18 @@ public class ListTokenManagementRequest { /** User ID of the user that created the token. */ @QueryParam("created_by_id") - private String createdById; + private Long createdById; /** Username of the user that created the token. 
*/ @QueryParam("created_by_username") private String createdByUsername; - public ListTokenManagementRequest setCreatedById(String createdById) { + public ListTokenManagementRequest setCreatedById(Long createdById) { this.createdById = createdById; return this; } - public String getCreatedById() { + public Long getCreatedById() { return createdById; } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListTokensResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListTokensResponse.java index a17db3bbf..09fa4602d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListTokensResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListTokensResponse.java @@ -8,9 +8,10 @@ import java.util.Collection; import java.util.Objects; +/** Tokens were successfully returned. */ @Generated public class ListTokensResponse { - /** */ + /** Token metadata of each user-created token in the workspace */ @JsonProperty("token_infos") private Collection tokenInfos; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenAccessControlRequest.java index baf5d6388..692f87aee 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenAccessControlRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenAccessControlRequest.java @@ -17,10 +17,7 @@ public class TokenAccessControlRequest { @JsonProperty("permission_level") private TokenPermissionLevel permissionLevel; - /** - * Application ID of an active service principal. Setting this field requires the - * `servicePrincipal/user` role. 
- */ + /** application ID of a service principal */ @JsonProperty("service_principal_name") private String servicePrincipalName; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementAPI.java index 6e7b6d8fd..965f2e1c8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementAPI.java @@ -26,11 +26,8 @@ public TokenManagementAPI(TokenManagementService mock) { impl = mock; } - public CreateOboTokenResponse createOboToken(String applicationId, long lifetimeSeconds) { - return createOboToken( - new CreateOboTokenRequest() - .setApplicationId(applicationId) - .setLifetimeSeconds(lifetimeSeconds)); + public CreateOboTokenResponse createOboToken(String applicationId) { + return createOboToken(new CreateOboTokenRequest().setApplicationId(applicationId)); } /** @@ -55,7 +52,7 @@ public void delete(DeleteTokenManagementRequest request) { impl.delete(request); } - public TokenInfo get(String tokenId) { + public GetTokenResponse get(String tokenId) { return get(new GetTokenManagementRequest().setTokenId(tokenId)); } @@ -64,7 +61,7 @@ public TokenInfo get(String tokenId) { * *

Gets information about a token, specified by its ID. */ - public TokenInfo get(GetTokenManagementRequest request) { + public GetTokenResponse get(GetTokenManagementRequest request) { return impl.get(request); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementImpl.java index eca3190ef..8008f7cd4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementImpl.java @@ -28,15 +28,16 @@ public CreateOboTokenResponse createOboToken(CreateOboTokenRequest request) { public void delete(DeleteTokenManagementRequest request) { String path = String.format("/api/2.0/token-management/tokens/%s", request.getTokenId()); Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); apiClient.DELETE(path, request, Void.class, headers); } @Override - public TokenInfo get(GetTokenManagementRequest request) { + public GetTokenResponse get(GetTokenManagementRequest request) { String path = String.format("/api/2.0/token-management/tokens/%s", request.getTokenId()); Map headers = new HashMap<>(); headers.put("Accept", "application/json"); - return apiClient.GET(path, request, TokenInfo.class, headers); + return apiClient.GET(path, request, GetTokenResponse.class, headers); } @Override diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementService.java index 55426832b..ab02b56ae 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementService.java @@ -32,7 +32,7 @@ public interface TokenManagementService { * *

Gets information about a token, specified by its ID. */ - TokenInfo get(GetTokenManagementRequest getTokenManagementRequest); + GetTokenResponse get(GetTokenManagementRequest getTokenManagementRequest); /** * Get token permission levels. diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueriesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueriesAPI.java index 9a33683ec..20f0e15e2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueriesAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueriesAPI.java @@ -76,6 +76,9 @@ public Query get(GetQueryRequest request) { * Get a list of queries. * *

Gets a list of queries. Optionally, this list can be filtered by a search term. + * + *

### **Warning: Calling this API concurrently 10 or more times could result in throttling, + * service degradation, or a temporary ban.** */ public Iterable list(ListQueriesRequest request) { request.setPage(1L); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueriesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueriesService.java index 5788fc471..a4ecf429d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueriesService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueriesService.java @@ -48,6 +48,9 @@ public interface QueriesService { * Get a list of queries. * *

Gets a list of queries. Optionally, this list can be filtered by a search term. + * + *

### **Warning: Calling this API concurrently 10 or more times could result in throttling, + * service degradation, or a temporary ban.** */ QueryList list(ListQueriesRequest listQueriesRequest); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehouseAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehouseAccessControlRequest.java index 594238f8c..1dc83f0f6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehouseAccessControlRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehouseAccessControlRequest.java @@ -17,10 +17,7 @@ public class WarehouseAccessControlRequest { @JsonProperty("permission_level") private WarehousePermissionLevel permissionLevel; - /** - * Application ID of an active service principal. Setting this field requires the - * `servicePrincipal/user` role. - */ + /** application ID of a service principal */ @JsonProperty("service_principal_name") private String servicePrincipalName; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoAccessControlRequest.java index 9a34d3307..1e507c3d6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoAccessControlRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/RepoAccessControlRequest.java @@ -17,10 +17,7 @@ public class RepoAccessControlRequest { @JsonProperty("permission_level") private RepoPermissionLevel permissionLevel; - /** - * Application ID of an active service principal. Setting this field requires the - * `servicePrincipal/user` role. 
- */ + /** application ID of a service principal */ @JsonProperty("service_principal_name") private String servicePrincipalName; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectAccessControlRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectAccessControlRequest.java index 3d6832dd3..edb8f3b3e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectAccessControlRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceObjectAccessControlRequest.java @@ -17,10 +17,7 @@ public class WorkspaceObjectAccessControlRequest { @JsonProperty("permission_level") private WorkspaceObjectPermissionLevel permissionLevel; - /** - * Application ID of an active service principal. Setting this field requires the - * `servicePrincipal/user` role. - */ + /** application ID of a service principal */ @JsonProperty("service_principal_name") private String servicePrincipalName; diff --git a/examples/docs/pom.xml b/examples/docs/pom.xml index 90661996d..7ed213e89 100644 --- a/examples/docs/pom.xml +++ b/examples/docs/pom.xml @@ -24,7 +24,7 @@ com.databricks databricks-sdk-java - 0.16.0 + 0.17.0 diff --git a/examples/spring-boot-oauth-u2m-demo/pom.xml b/examples/spring-boot-oauth-u2m-demo/pom.xml index 6236d8896..31278dee0 100644 --- a/examples/spring-boot-oauth-u2m-demo/pom.xml +++ b/examples/spring-boot-oauth-u2m-demo/pom.xml @@ -37,7 +37,7 @@ com.databricks databricks-sdk-java - 0.16.0 + 0.17.0 com.fasterxml.jackson.datatype diff --git a/pom.xml b/pom.xml index bcbd4854f..52e65f14e 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ 4.0.0 com.databricks databricks-sdk-parent - 0.16.0 + 0.17.0 pom Databricks SDK for Java The Databricks SDK for Java includes functionality to accelerate development with Java for