diff --git a/.codegen.json b/.codegen.json index bf977d30c..b3c649787 100644 --- a/.codegen.json +++ b/.codegen.json @@ -11,9 +11,14 @@ ".codegen/api.java.tmpl": "databricks-sdk-java/src/main/java/com/databricks/sdk/service/{{.Package.Name}}/{{.PascalName}}API.java", ".codegen/impl.java.tmpl": "databricks-sdk-java/src/main/java/com/databricks/sdk/service/{{.Package.Name}}/{{.PascalName}}Impl.java" }, + "exception_types": { + ".codegen/exception.java.tmpl": "databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/{{.PascalName}}.java" + }, "batch": { ".codegen/workspace.java.tmpl": "databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java", - ".codegen/account.java.tmpl": "databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java" + ".codegen/account.java.tmpl": "databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java", + ".codegen/error-mapper.java.tmpl": "databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/ErrorMapper.java", + ".codegen/error-overrides.java.tmpl": "databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/ErrorOverrides.java" }, "version": { "pom.xml": "databricks-sdk-parent\n $VERSION", diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index 5ea614e9d..b5ff5442f 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -23fe1139476cb1a8c7b8ff2ff1fbc0132380f7a5 \ No newline at end of file +06d330f43d92c1be864d4638c672cd0723e20a51 \ No newline at end of file diff --git a/.codegen/api.java.tmpl b/.codegen/api.java.tmpl index 3e49f75a5..03a5340bc 100644 --- a/.codegen/api.java.tmpl +++ b/.codegen/api.java.tmpl @@ -146,47 +146,55 @@ public class {{.PascalName}}API { {{- end}} {{define "method-call-paginated" -}} - {{- if .Pagination.MultiRequest -}} - {{- if and .Pagination.Offset (not (eq .Path "/api/2.0/clusters/events")) -}} - request.set{{.Pagination.Offset.PascalName}}( - {{- if eq .Pagination.Increment 1 -}} - 1 - {{- else if contains 
.Path "/scim/v2/" -}} - 1 - {{- else -}} - 0 - {{- end}}L);{{end -}} - {{if and .Pagination.Limit (contains .Path "/scim/v2/")}} - if (request.get{{.Pagination.Limit.PascalName}}() == null) { - request.set{{.Pagination.Limit.PascalName}}(100L); - }{{end -}} - return new Paginator<>(request, impl::{{template "java-name" .}}, {{template "type" .Response}}::get{{.Pagination.Results.PascalName}}, response -> { - {{if eq .Path "/api/2.0/clusters/events" -}} - return response.getNextPage(); + {{- if and .Pagination.Offset (not (eq .Path "/api/2.0/clusters/events")) -}} + request.set{{.Pagination.Offset.PascalName}}( + {{- if eq .Pagination.Increment 1 -}} + 1 + {{- else if contains .Path "/scim/v2/" -}} + 1 + {{- else -}} + 0 + {{- end}}L);{{end -}} + {{if and .Pagination.Limit (contains .Path "/scim/v2/")}} + if (request.get{{.Pagination.Limit.PascalName}}() == null) { + request.set{{.Pagination.Limit.PascalName}}(100L); + }{{end -}} + return new Paginator<>( + {{ if .Request }}request{{ else }}null{{ end }}, + {{ if .Request }}impl::{{template "java-name" .}}{{ else }}(Void v) -> impl.{{template "java-name" .}}(){{ end }}, + {{template "type" .Response}}::get{{.Pagination.Results.PascalName}}, + response -> + {{ if not .Pagination.MultiRequest }} + null + {{- else if eq .Path "/api/2.0/clusters/events" -}} + response.getNextPage() {{- else if .Pagination.Token -}} - String token = response.get{{.Pagination.Token.Bind.PascalName}}(); - if (token == null) { - return null; + { + String token = response.get{{.Pagination.Token.Bind.PascalName}}(); + if (token == null) { + return null; + } + return request.set{{.Pagination.Token.PollField.PascalName}}(token); } - return request.set{{.Pagination.Token.PollField.PascalName}}(token); {{- else if eq .Pagination.Increment 1 -}} - Long page = request.get{{.Pagination.Offset.PascalName}}(); - if (page == null) { - page = 1L; // redash uses 1-based pagination + { + Long page = request.get{{.Pagination.Offset.PascalName}}(); + if 
(page == null) { + page = 1L; // redash uses 1-based pagination + } + return request.set{{.Pagination.Offset.PascalName}}(page+1L); } - return request.set{{.Pagination.Offset.PascalName}}(page+1L); {{- else -}} - Long offset = request.get{{.Pagination.Offset.PascalName}}(); - if (offset == null) { - offset = 0L; + { + Long offset = request.get{{.Pagination.Offset.PascalName}}(); + if (offset == null) { + offset = 0L; + } + offset += response.get{{.Pagination.Results.PascalName}}().size(); + return request.set{{.Pagination.Offset.PascalName}}(offset); } - offset += response.get{{.Pagination.Results.PascalName}}().size(); - return request.set{{.Pagination.Offset.PascalName}}(offset); {{- end}} - }){{if .NeedsOffsetDedupe -}}.withDedupe({{.Pagination.Entity.PascalName}}::get{{.IdentifierField.PascalName}}){{end}}; - {{- else -}} - return impl.{{template "java-name" .}}({{if .Request}}request{{end}}){{with .Pagination.Results}}.get{{.PascalName}}(){{end}}; - {{- end -}} + ){{if .NeedsOffsetDedupe -}}.withDedupe({{.Pagination.Entity.PascalName}}::get{{.IdentifierField.PascalName}}){{end}}; {{- end}} {{define "method-call-retried" -}} diff --git a/.codegen/error-mapper.java.tmpl b/.codegen/error-mapper.java.tmpl new file mode 100644 index 000000000..0e6056630 --- /dev/null +++ b/.codegen/error-mapper.java.tmpl @@ -0,0 +1,15 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.core.error; + +import com.databricks.sdk.support.Generated; + +@Generated +class ErrorMapper extends AbstractErrorMapper { + public ErrorMapper() { + {{range .ErrorStatusCodeMapping}}statusCode({{.StatusCode}}, com.databricks.sdk.core.error.platform.{{.PascalName}}::new); + {{end}} + {{range .ErrorCodeMapping}}errorCode("{{.ErrorCode}}", com.databricks.sdk.core.error.platform.{{.PascalName}}::new); + {{end}} + } +} diff --git a/.codegen/error-overrides.java.tmpl b/.codegen/error-overrides.java.tmpl new file mode 100644 index 000000000..fb9cff735 --- /dev/null +++ b/.codegen/error-overrides.java.tmpl @@ -0,0 +1,24 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.core.error; + +import java.util.Arrays; +import java.util.List; + +import com.databricks.sdk.support.Generated; + +@Generated +class ErrorOverrides { + static final List> ALL_OVERRIDES = Arrays.asList( +{{- range $i, $x := .ErrorOverrides }} + {{if not (eq $i 0)}}, {{end}}new ErrorOverride<>( + "{{$x.Name}}", + "{{ replaceAll "\\" "\\\\" $x.PathRegex}}", + "{{$x.Verb}}", + "{{ replaceAll "\\" "\\\\" $x.StatusCodeMatcher}}", + "{{ replaceAll "\\" "\\\\" $x.ErrorCodeMatcher}}", + "{{ replaceAll "\\" "\\\\" $x.MessageMatcher}}", + com.databricks.sdk.core.error.platform.{{$x.OverrideErrorCode.PascalName}}.class) +{{- end}} + ); +} diff --git a/.codegen/exception.java.tmpl b/.codegen/exception.java.tmpl new file mode 100644 index 000000000..2e285c3f5 --- /dev/null +++ b/.codegen/exception.java.tmpl @@ -0,0 +1,25 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.core.error.platform; + +import com.databricks.sdk.core.DatabricksError; +import com.databricks.sdk.core.error.ErrorDetail; +import com.databricks.sdk.support.Generated; + +import java.util.List; + +/** + {{.Comment " * " 80}} + */ +@Generated +public class {{.PascalName}} extends {{if .Inherit -}}{{.Inherit.PascalName}}{{else}}DatabricksError{{end}} { + public {{.PascalName}}(String message, List details) { + super("{{.Name}}", message, {{if not .Inherit}}{{.StatusCode}}, {{end}}details); + } + + {{if not .Inherit}} + public {{.PascalName}}(String errorCode, String message, List details) { + super(errorCode, message, {{.StatusCode}}, details); + } + {{end}} +} diff --git a/.codegen/impl.java.tmpl b/.codegen/impl.java.tmpl index 1cda1d341..3a0ecd41f 100644 --- a/.codegen/impl.java.tmpl +++ b/.codegen/impl.java.tmpl @@ -9,6 +9,7 @@ import java.util.HashMap; import com.databricks.sdk.core.ApiClient; import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Encoding; import com.databricks.sdk.support.Generated; {{range .Package.ImportedEntities}} @@ -25,9 +26,7 @@ class {{.PascalName}}Impl implements {{.PascalName}}Service { {{range .Methods}} @Override public {{if not .Response.IsEmpty -}}{{template "type" .Response}}{{else}}void{{end}} {{.CamelName}}{{if .IsNameReserved}}Content{{end}}({{if .Request}}{{template "type" .Request}} request{{end}}) { - String path = {{if .PathParts -}} - String.format("{{range .PathParts}}{{.Prefix}}{{if or .Field .IsAccountId}}%s{{end}}{{ end }}"{{ range .PathParts }}{{if .Field}}, request.get{{.Field.PascalName}}(){{ else if .IsAccountId }}, apiClient.configuredAccountID(){{end}}{{ end }}) - {{- else}}"{{.Path}}"{{end}}; + String path = {{ template "path" . }}; {{ template "headers" . -}} {{ if .Response.IsEmpty -}} {{ template "api-call" . 
}} @@ -39,6 +38,23 @@ class {{.PascalName}}Impl implements {{.PascalName}}Service { {{end}} } +{{- define "path" -}} +{{- if .PathParts -}} + String.format("{{range .PathParts -}} + {{- .Prefix -}} + {{- if or .Field .IsAccountId -}}%s{{- end -}} + {{- end -}}" + {{- range .PathParts -}} + {{- if and .Field .Field.IsPathMultiSegment -}}, Encoding.encodeMultiSegmentPathParameter(request.get{{.Field.PascalName}}()) + {{- else if .Field -}}, request.get{{.Field.PascalName}}() + {{- else if .IsAccountId -}}, apiClient.configuredAccountID() + {{- end -}} + {{- end -}}) +{{- else -}} + "{{.Path}}" +{{- end -}} +{{- end -}} + {{ define "api-call" }} apiClient.{{.Verb}}(path {{- if .Request}}, {{ template "request-param" .}}{{end}} diff --git a/.gitattributes b/.gitattributes index fdfd0581c..72347ca14 100755 --- a/.gitattributes +++ b/.gitattributes @@ -1,5 +1,27 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/ErrorMapper.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/ErrorOverrides.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/Aborted.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/AlreadyExists.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/BadRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/Cancelled.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/DataLoss.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/DeadlineExceeded.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/InternalError.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/InvalidParameterValue.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/NotFound.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/NotImplemented.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/PermissionDenied.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/RequestLimitExceeded.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/ResourceAlreadyExists.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/ResourceConflict.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/ResourceDoesNotExist.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/ResourceExhausted.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/TemporarilyUnavailable.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/TooManyRequests.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/Unauthenticated.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/Unknown.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BillableUsageAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BillableUsageImpl.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BillableUsageService.java linguist-generated=true @@ -61,8 +83,10 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactAll databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactMatcher.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AssignResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRole.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentity.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRoleRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRoleResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentityRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentityResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureServicePrincipal.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CancelRefreshRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CancelRefreshResponse.java linguist-generated=true @@ -95,6 +119,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateFunct databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMetastore.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMetastoreAssignment.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMonitor.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateOnlineTableRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateRegisteredModelRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateSchema.java linguist-generated=true @@ -196,6 +221,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/LakehouseMo databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountMetastoreAssignmentsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountMetastoreAssignmentsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListAccountStorageCredentialsRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCatalogsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCatalogsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListConnectionsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsRequest.java linguist-generated=true @@ -234,19 +260,20 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersio databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionsService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorCronSchedule.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorCronSchedulePauseStatus.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorCustomMetric.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorCustomMetricType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorDataClassificationConfig.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorDestinations.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInferenceLogProfileType.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInferenceLogProfileTypeProblemType.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorDestination.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInferenceLog.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInferenceLogProblemType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInfoStatus.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorNotificationsConfig.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorMetric.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorMetricType.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorNotifications.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorRefreshInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorRefreshInfoState.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorSnapshotProfileType.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorTimeSeriesProfileType.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorRefreshInfoTrigger.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorSnapshot.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorTimeSeries.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/NamedTableConstraint.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTable.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTableSpec.java linguist-generated=true @@ -331,7 +358,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateSto databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidationResult.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidationResultOperation.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidationResultResult.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ViewData.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumeInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumeType.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumesAPI.java linguist-generated=true @@ -356,6 +382,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CancelRespo databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ChangeClusterOwner.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ChangeClusterOwnerResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClientsTypes.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CloneCluster.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CloudProviderNodeInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CloudProviderNodeStatus.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAccessControlRequest.java linguist-generated=true @@ -383,7 +410,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoli databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSize.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSource.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSpec.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterStatusRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterStatus.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterStatusResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersAPI.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersService.java linguist-generated=true @@ -394,8 +422,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandExec databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandStatus.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandStatusRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CommandStatusResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ComputeSpec.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ComputeSpecKind.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ContextStatus.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ContextStatusRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ContextStatusResponse.java linguist-generated=true @@ -436,6 +462,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditInstanc databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditPolicy.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditPolicyResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EventDetails.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EventDetailsCause.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EventType.java linguist-generated=true @@ -504,7 +531,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesIm databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Library.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibraryFullStatus.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibraryFullStatusStatus.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibraryInstallStatus.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListAllClusterLibraryStatusesResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListAvailableZonesResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClusterPoliciesRequest.java linguist-generated=true @@ -564,11 +591,22 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateRespo databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/VolumesStorageInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/WorkloadType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/WorkspaceStorageInfo.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateDashboardRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Dashboard.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetDashboardRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LifecycleState.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MigrateDashboardRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PublishRequest.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PublishResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PublishedDashboard.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashDashboardRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashDashboardResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UnpublishDashboardRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UnpublishDashboardResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateDashboardRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/AddBlock.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/AddBlockResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/Close.java linguist-generated=true @@ -692,6 +730,11 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/Permission.java databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionAssignment.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionAssignments.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionLevel.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionMigrationAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionMigrationImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionMigrationRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionMigrationResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionMigrationService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionOutput.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsDescription.java linguist-generated=true @@ -719,7 +762,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UsersService.ja databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspaceAssignmentAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspaceAssignmentImpl.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspaceAssignmentService.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspaceAssignmentsUpdated.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspacePermission.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspacePermissions.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseJob.java linguist-generated=true @@ -735,7 +777,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ConditionTask. databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ConditionTaskOp.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Continuous.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJobEditMode.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CronSchedule.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtOutput.java linguist-generated=true @@ -765,10 +806,11 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Job.java lingu databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobAccessControlRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobAccessControlResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobCluster.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobCompute.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobDeployment.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobDeploymentKind.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobEditMode.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobEmailNotifications.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobEnvironment.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobNotificationSettings.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobParameter.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobParameterDefinition.java linguist-generated=true @@ -779,7 +821,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobPermissions databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobPermissionsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobRunAs.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettingsEditMode.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSource.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSourceDirtyState.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobsAPI.java linguist-generated=true @@ -793,7 +834,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListJobsReques databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListJobsResponse.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListRunsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListRunsResponse.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListRunsRunType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/NotebookOutput.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/NotebookTask.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PauseStatus.java linguist-generated=true @@ -817,7 +857,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ResolvedString databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ResolvedValues.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Run.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunConditionTask.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunConditionTaskOp.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunForEachTask.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunIf.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunJobOutput.java linguist-generated=true @@ -853,7 +892,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlTaskSubscri databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitRun.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitRunResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java linguist-generated=true 
-databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TableTriggerConfiguration.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TableUpdateTriggerConfiguration.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TaskDependency.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TaskEmailNotifications.java linguist-generated=true @@ -868,7 +907,173 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ViewType.java databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ViewsToExport.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Webhook.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/WebhookNotifications.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/WebhookNotificationsOnDurationWarningThresholdExceededItem.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AddExchangeForListingRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AddExchangeForListingResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AssetType.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/Category.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerFulfillmentsAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerFulfillmentsImpl.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerFulfillmentsService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerInstallationsAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerInstallationsImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerInstallationsService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerListingsAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerListingsImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerListingsService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerPersonalizationRequestsAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerPersonalizationRequestsImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerPersonalizationRequestsService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerProvidersAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerProvidersImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerProvidersService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerTerms.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ContactInfo.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/Cost.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateExchangeFilterRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateExchangeFilterResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateExchangeRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateExchangeResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateFileRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateFileResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateInstallationRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateListingRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateListingResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreatePersonalizationRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreatePersonalizationRequestResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateProviderRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateProviderResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DataRefresh.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DataRefreshInfo.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeFilterRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeFilterResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteFileRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteFileResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteInstallationRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteInstallationResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteListingRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteListingResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteProviderRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteProviderResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeltaSharingRecipientType.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/Exchange.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ExchangeFilter.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ExchangeFilterType.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ExchangeListing.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/FileInfo.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/FileParent.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/FileParentType.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/FileStatus.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/FilterType.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/FulfillmentType.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetExchangeRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetExchangeResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetFileRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetFileResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetLatestVersionProviderAnalyticsDashboardResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingContentMetadataRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingContentMetadataResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingResponse.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingsRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingsResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetPersonalizationRequestRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetPersonalizationRequestResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetProviderRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetProviderResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/Installation.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/InstallationDetail.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/InstallationStatus.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListAllInstallationsRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListAllInstallationsResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListAllPersonalizationRequestsRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListAllPersonalizationRequestsResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangeFiltersRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangeFiltersResponse.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangesForListingRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangesForListingResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangesRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangesResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListFilesRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListFilesResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListFulfillmentsRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListFulfillmentsResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListInstallationsRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListInstallationsResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListListingsForExchangeRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListListingsForExchangeResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListListingsRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListListingsResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListProviderAnalyticsDashboardResponse.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListProvidersRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListProvidersResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/Listing.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingDetail.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingFulfillment.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingSetting.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingShareType.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingStatus.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingSummary.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingTag.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingTagType.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingType.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/MarketplaceFileType.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/PersonalizationRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/PersonalizationRequestStatus.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderAnalyticsDashboard.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangeFiltersAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangeFiltersImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangeFiltersService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangesAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangesImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangesService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderFilesAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderFilesImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderFilesService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderInfo.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderListingsAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderListingsImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderListingsService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderPersonalizationRequestsAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderPersonalizationRequestsImpl.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderPersonalizationRequestsService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderProviderAnalyticsDashboardsAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderProviderAnalyticsDashboardsImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderProviderAnalyticsDashboardsService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderProvidersAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderProvidersImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderProvidersService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RegionInfo.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RemoveExchangeForListingRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RemoveExchangeForListingResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RepoInfo.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RepoInstallation.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/SearchListingsRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/SearchListingsResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ShareInfo.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/SharedDataObject.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/SortBy.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/SortBySpec.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/SortOrder.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/TokenDetail.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/TokenInfo.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateExchangeFilterRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateExchangeFilterResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateExchangeRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateExchangeResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateInstallationRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateInstallationResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateListingRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateListingResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdatePersonalizationRequestRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdatePersonalizationRequestResponse.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateProviderAnalyticsDashboardRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateProviderAnalyticsDashboardResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateProviderRequest.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateProviderResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/Visibility.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/VisibilityFilter.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/Activity.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ActivityAction.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ActivityType.java linguist-generated=true @@ -1099,6 +1304,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CronTrigg databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DataPlaneId.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DeletePipelineRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DeletePipelineResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DeploymentKind.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipelineResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ErrorDetail.java 
linguist-generated=true @@ -1129,6 +1335,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineA databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineCluster.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineClusterAutoscale.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineClusterAutoscaleMode.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineDeployment.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineEvent.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineLibrary.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinePermission.java linguist-generated=true @@ -1238,6 +1445,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Worksp databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/Ai21LabsConfig.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AmazonBedrockConfig.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AmazonBedrockConfigBedrockProvider.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AnthropicConfig.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AppEvents.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AppManifest.java 
linguist-generated=true @@ -1248,8 +1457,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AppsService databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AutoCaptureConfigInput.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AutoCaptureConfigOutput.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AutoCaptureState.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AwsBedrockConfig.java linguist-generated=true -databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AwsBedrockConfigBedrockProvider.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/BuildLogsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/BuildLogsResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ChatMessage.java linguist-generated=true @@ -1338,6 +1545,9 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountIpA databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountSettingsAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountSettingsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountSettingsService.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AutomaticClusterUpdateAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AutomaticClusterUpdateImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AutomaticClusterUpdateService.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AutomaticClusterUpdateSetting.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ClusterAutoRestartMessage.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ClusterAutoRestartMessageEnablementDetails.java linguist-generated=true @@ -1360,9 +1570,18 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/Credential databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CredentialsManagerImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CredentialsManagerService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablement.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccount.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountSetting.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementSetting.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultNamespaceAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultNamespaceImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultNamespaceService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultNamespaceSetting.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountIpAccessListRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDefaultNamespaceSettingRequest.java linguist-generated=true @@ -1378,8 +1597,14 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteRest databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteRestrictWorkspaceAdminsSettingResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteTokenManagementRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablement.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementAPI.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementAccount.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementAccountAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementAccountImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementAccountService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementAccountSetting.java linguist-generated=true 
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementImpl.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementSetting.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ExchangeToken.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ExchangeTokenRequest.java linguist-generated=true @@ -1416,6 +1641,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListPublic databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListTokenManagementRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListTokensResponse.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ListType.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAwsStableIpRule.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAzurePrivateEndpointRule.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAzurePrivateEndpointRuleConnectionState.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAzurePrivateEndpointRuleGroupId.java linguist-generated=true @@ -1428,14 +1654,20 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkCon databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityService.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PartitionId.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalComputeAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalComputeImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalComputeMessage.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalComputeMessageEnum.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalComputeService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalComputeSetting.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PublicTokenInfo.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReplaceIpAccessList.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ReplaceResponse.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RestrictWorkspaceAdminsAPI.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RestrictWorkspaceAdminsImpl.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RestrictWorkspaceAdminsMessage.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RestrictWorkspaceAdminsMessageStatus.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RestrictWorkspaceAdminsService.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RestrictWorkspaceAdminsSetting.java linguist-generated=true 
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RevokeTokenRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RevokeTokenResponse.java linguist-generated=true @@ -1540,6 +1772,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ShareInfo.j databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharePermissionsRequest.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ShareToPrivilegeAssignment.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObject.java linguist-generated=true +databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObjectDataObjectType.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObjectHistoryDataSharingStatus.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObjectStatus.java linguist-generated=true databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObjectUpdate.java linguist-generated=true diff --git a/CHANGELOG.md b/CHANGELOG.md index 35fca90ed..00d1251dc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,385 @@ # Version changelog +## 0.23.0 + +### Improvements and Bug Fixes +* Introduce more specific exceptions, like `NotFound`, `AlreadyExists`, `BadRequest`, `PermissionDenied`, `InternalError`, and others ([#185](https://github.com/databricks/databricks-sdk-java/pull/185), [#257](https://github.com/databricks/databricks-sdk-java/pull/257)). +* Lock around field accessibility changes ([#247](https://github.com/databricks/databricks-sdk-java/pull/247)). +* Fix Changelog ([#258](https://github.com/databricks/databricks-sdk-java/pull/258)). +* Support post with no body for APIs ([#262](https://github.com/databricks/databricks-sdk-java/pull/262)). 
+ +API Changes: + + * Changed `cancelRefresh()` method for `workspaceClient.lakehouseMonitors()` service with new required argument order. + * Changed `create()` method for `workspaceClient.lakehouseMonitors()` service with new required argument order. + * Changed `delete()` method for `workspaceClient.lakehouseMonitors()` service with new required argument order. + * Changed `get()` method for `workspaceClient.lakehouseMonitors()` service with new required argument order. + * Changed `getRefresh()` method for `workspaceClient.lakehouseMonitors()` service with new required argument order. + * Changed `listRefreshes()` method for `workspaceClient.lakehouseMonitors()` service with new required argument order. + * Changed `runRefresh()` method for `workspaceClient.lakehouseMonitors()` service with new required argument order. + * Changed `update()` method for `workspaceClient.lakehouseMonitors()` service with new required argument order. + * Removed `com.databricks.sdk.service.catalog.AzureManagedIdentity` class. + * Removed `fullName` field for `com.databricks.sdk.service.catalog.CancelRefreshRequest`. + * Added `tableName` field for `com.databricks.sdk.service.catalog.CancelRefreshRequest`. + * Changed `customMetrics` field for `com.databricks.sdk.service.catalog.CreateMonitor` to `com.databricks.sdk.service.catalog.MonitorMetricList` class. + * Removed `fullName` field for `com.databricks.sdk.service.catalog.CreateMonitor`. + * Changed `inferenceLog` field for `com.databricks.sdk.service.catalog.CreateMonitor` to `com.databricks.sdk.service.catalog.MonitorInferenceLog` class. + * Changed `notifications` field for `com.databricks.sdk.service.catalog.CreateMonitor` to `com.databricks.sdk.service.catalog.MonitorNotifications` class. + * Changed `snapshot` field for `com.databricks.sdk.service.catalog.CreateMonitor` to `Object` class. 
+ * Changed `timeSeries` field for `com.databricks.sdk.service.catalog.CreateMonitor` to `com.databricks.sdk.service.catalog.MonitorTimeSeries` class. + * Added `tableName` field for `com.databricks.sdk.service.catalog.CreateMonitor`. + * Changed `azureManagedIdentity` field for `com.databricks.sdk.service.catalog.CreateStorageCredential` to `com.databricks.sdk.service.catalog.AzureManagedIdentityRequest` class. + * Removed `fullName` field for `com.databricks.sdk.service.catalog.DeleteLakehouseMonitorRequest`. + * Added `tableName` field for `com.databricks.sdk.service.catalog.DeleteLakehouseMonitorRequest`. + * Removed `fullName` field for `com.databricks.sdk.service.catalog.GetLakehouseMonitorRequest`. + * Added `tableName` field for `com.databricks.sdk.service.catalog.GetLakehouseMonitorRequest`. + * Removed `fullName` field for `com.databricks.sdk.service.catalog.GetRefreshRequest`. + * Added `tableName` field for `com.databricks.sdk.service.catalog.GetRefreshRequest`. + * Removed `fullName` field for `com.databricks.sdk.service.catalog.ListRefreshesRequest`. + * Added `tableName` field for `com.databricks.sdk.service.catalog.ListRefreshesRequest`. + * Changed `quartzCronExpression` field for `com.databricks.sdk.service.catalog.MonitorCronSchedule` to be required. + * Changed `timezoneId` field for `com.databricks.sdk.service.catalog.MonitorCronSchedule` to be required. + * Removed `com.databricks.sdk.service.catalog.MonitorCustomMetric` class. + * Removed `com.databricks.sdk.service.catalog.MonitorCustomMetricType` class. + * Removed `com.databricks.sdk.service.catalog.MonitorDestinations` class. + * Removed `com.databricks.sdk.service.catalog.MonitorInferenceLogProfileType` class. + * Removed `com.databricks.sdk.service.catalog.MonitorInferenceLogProfileTypeProblemType` class. + * Changed `customMetrics` field for `com.databricks.sdk.service.catalog.MonitorInfo` to `com.databricks.sdk.service.catalog.MonitorMetricList` class. 
+ * Changed `driftMetricsTableName` field for `com.databricks.sdk.service.catalog.MonitorInfo` to be required. + * Changed `inferenceLog` field for `com.databricks.sdk.service.catalog.MonitorInfo` to `com.databricks.sdk.service.catalog.MonitorInferenceLog` class. + * Changed `monitorVersion` field for `com.databricks.sdk.service.catalog.MonitorInfo` to be required. + * Changed `notifications` field for `com.databricks.sdk.service.catalog.MonitorInfo` to `com.databricks.sdk.service.catalog.MonitorNotifications` class. + * Changed `profileMetricsTableName` field for `com.databricks.sdk.service.catalog.MonitorInfo` to be required. + * Changed `snapshot` field for `com.databricks.sdk.service.catalog.MonitorInfo` to `Object` class. + * Changed `status` field for `com.databricks.sdk.service.catalog.MonitorInfo` to be required. + * Changed `tableName` field for `com.databricks.sdk.service.catalog.MonitorInfo` to be required. + * Changed `timeSeries` field for `com.databricks.sdk.service.catalog.MonitorInfo` to `com.databricks.sdk.service.catalog.MonitorTimeSeries` class. + * Removed `com.databricks.sdk.service.catalog.MonitorNotificationsConfig` class. + * Changed `refreshId` field for `com.databricks.sdk.service.catalog.MonitorRefreshInfo` to be required. + * Changed `startTimeMs` field for `com.databricks.sdk.service.catalog.MonitorRefreshInfo` to be required. + * Changed `state` field for `com.databricks.sdk.service.catalog.MonitorRefreshInfo` to be required. + * Added `trigger` field for `com.databricks.sdk.service.catalog.MonitorRefreshInfo`. + * Removed `Object` class. + * Removed `com.databricks.sdk.service.catalog.MonitorTimeSeriesProfileType` class. + * Removed `fullName` field for `com.databricks.sdk.service.catalog.RunRefreshRequest`. + * Added `tableName` field for `com.databricks.sdk.service.catalog.RunRefreshRequest`. 
+ * Changed `azureManagedIdentity` field for `com.databricks.sdk.service.catalog.StorageCredentialInfo` to `com.databricks.sdk.service.catalog.AzureManagedIdentityResponse` class. + * Removed `name` field for `com.databricks.sdk.service.catalog.TableRowFilter`. + * Added `functionName` field for `com.databricks.sdk.service.catalog.TableRowFilter`. + * Changed `customMetrics` field for `com.databricks.sdk.service.catalog.UpdateMonitor` to `com.databricks.sdk.service.catalog.MonitorMetricList` class. + * Removed `fullName` field for `com.databricks.sdk.service.catalog.UpdateMonitor`. + * Changed `inferenceLog` field for `com.databricks.sdk.service.catalog.UpdateMonitor` to `com.databricks.sdk.service.catalog.MonitorInferenceLog` class. + * Changed `notifications` field for `com.databricks.sdk.service.catalog.UpdateMonitor` to `com.databricks.sdk.service.catalog.MonitorNotifications` class. + * Changed `snapshot` field for `com.databricks.sdk.service.catalog.UpdateMonitor` to `Object` class. + * Changed `timeSeries` field for `com.databricks.sdk.service.catalog.UpdateMonitor` to `com.databricks.sdk.service.catalog.MonitorTimeSeries` class. + * Added `tableName` field for `com.databricks.sdk.service.catalog.UpdateMonitor`. + * Changed `azureManagedIdentity` field for `com.databricks.sdk.service.catalog.UpdateStorageCredential` to `com.databricks.sdk.service.catalog.AzureManagedIdentityResponse` class. + * Changed `azureManagedIdentity` field for `com.databricks.sdk.service.catalog.ValidateStorageCredential` to `com.databricks.sdk.service.catalog.AzureManagedIdentityRequest` class. + * Removed `operation` field for `com.databricks.sdk.service.catalog.ValidationResult`. + * Added `awsOperation` field for `com.databricks.sdk.service.catalog.ValidationResult`. + * Added `azureOperation` field for `com.databricks.sdk.service.catalog.ValidationResult`. + * Added `gcpOperation` field for `com.databricks.sdk.service.catalog.ValidationResult`. 
+ * Removed `com.databricks.sdk.service.catalog.ValidationResultOperation` class. + * Added `com.databricks.sdk.service.catalog.AzureManagedIdentityRequest` class. + * Added `com.databricks.sdk.service.catalog.AzureManagedIdentityResponse` class. + * Added `com.databricks.sdk.service.catalog.MonitorDestination` class. + * Added `com.databricks.sdk.service.catalog.MonitorInferenceLog` class. + * Added `com.databricks.sdk.service.catalog.MonitorInferenceLogProblemType` class. + * Added `com.databricks.sdk.service.catalog.MonitorMetric` class. + * Added `com.databricks.sdk.service.catalog.MonitorMetricType` class. + * Added `com.databricks.sdk.service.catalog.MonitorNotifications` class. + * Added `com.databricks.sdk.service.catalog.MonitorRefreshInfoTrigger` class. + * Added `Object` class. + * Added `com.databricks.sdk.service.catalog.MonitorTimeSeries` class. + * Added `com.databricks.sdk.service.catalog.ValidationResultAwsOperation` class. + * Added `com.databricks.sdk.service.catalog.ValidationResultAzureOperation` class. + * Added `com.databricks.sdk.service.catalog.ValidationResultGcpOperation` class. + * Added `cloneFrom` field for `com.databricks.sdk.service.compute.ClusterSpec`. + * Removed `com.databricks.sdk.service.compute.ComputeSpec` class. + * Removed `com.databricks.sdk.service.compute.ComputeSpecKind` class. + * Added `cloneFrom` field for `com.databricks.sdk.service.compute.CreateCluster`. + * Added `cloneFrom` field for `com.databricks.sdk.service.compute.EditCluster`. + * Added `com.databricks.sdk.service.compute.CloneCluster` class. + * Added `com.databricks.sdk.service.compute.Environment` class. + * Changed `update()` method for `accountClient.workspaceAssignment()` service to return `com.databricks.sdk.service.iam.PermissionAssignment` class. + * Removed `Object` class. + * Removed `computeKey` field for `com.databricks.sdk.service.jobs.ClusterSpec`. + * Removed `compute` field for `com.databricks.sdk.service.jobs.CreateJob`. 
+ * Added `environments` field for `com.databricks.sdk.service.jobs.CreateJob`. + * Removed `com.databricks.sdk.service.jobs.JobCompute` class. + * Removed `compute` field for `com.databricks.sdk.service.jobs.JobSettings`. + * Added `environments` field for `com.databricks.sdk.service.jobs.JobSettings`. + * Removed `computeKey` field for `com.databricks.sdk.service.jobs.RunTask`. + * Removed `com.databricks.sdk.service.jobs.TableTriggerConfiguration` class. + * Removed `computeKey` field for `com.databricks.sdk.service.jobs.Task`. + * Added `environmentKey` field for `com.databricks.sdk.service.jobs.Task`. + * Changed `table` field for `com.databricks.sdk.service.jobs.TriggerSettings` to `com.databricks.sdk.service.jobs.TableUpdateTriggerConfiguration` class. + * Changed `tableUpdate` field for `com.databricks.sdk.service.jobs.TriggerSettings` to `com.databricks.sdk.service.jobs.TableUpdateTriggerConfiguration` class. + * Added `com.databricks.sdk.service.jobs.JobEnvironment` class. + * Added `com.databricks.sdk.service.jobs.TableUpdateTriggerConfiguration` class. + * Added `com.databricks.sdk.service.marketplace` package. + +OpenAPI SHA: 94684175b8bd65f8701f89729351f8069e8309c9, Date: 2024-04-11 + + +## 0.22.0 + +Improvements and Bug Fixes +* Properly escape multi-segment path parameters ([#252](https://github.com/databricks/databricks-sdk-java/pull/252)). + +API Changes: +* Added `migrate()` and `unpublish()` methods for `workspaceClient.lakeview()` service. +* Added `com.databricks.sdk.service.dashboards.MigrateDashboardRequest` and `com.databricks.sdk.service.dashboards.UnpublishDashboardRequest` class. +* Added `description`, `queueDuration` and `repairHistory` fields for `com.databricks.sdk.service.jobs.BaseRun`. +* Added `computeKey` and `jobClusterKey` field for `com.databricks.sdk.service.jobs.ClusterSpec`. +* Changed `left`, `op` and `right` fields for `com.databricks.sdk.service.jobs.ConditionTask` to be required. 
+* Changed `editMode` field for `com.databricks.sdk.service.jobs.CreateJob` to `com.databricks.sdk.service.jobs.JobEditMode` class. +* Replaced `com.databricks.sdk.service.jobs.CreateJobEditMode` class by `com.databricks.sdk.service.jobs.JobEditMode`. +* Changed `url` field for `com.databricks.sdk.service.jobs.FileArrivalTriggerConfiguration` to be required. +* Changed `errorMessageStats` field for `com.databricks.sdk.service.jobs.ForEachStats` to `com.databricks.sdk.service.jobs.ForEachTaskErrorMessageStatsList` class. +* Changed `newCluster` field for `com.databricks.sdk.service.jobs.JobCluster` to be required. +* Changed `editMode` field for `com.databricks.sdk.service.jobs.JobSettings` to `com.databricks.sdk.service.jobs.JobEditMode` class. +* Removed `com.databricks.sdk.service.jobs.JobSettingsEditMode` class. +* Changed `metric`, `op` and `value` fields for `com.databricks.sdk.service.jobs.JobsHealthRule` to be required. +* Changed `runType` field for `com.databricks.sdk.service.jobs.ListRunsRequest` to `com.databricks.sdk.service.jobs.RunType` class. +* Replaced `com.databricks.sdk.service.jobs.ListRunsRunType` class by `com.databricks.sdk.service.jobs.RunType`. +* Removed `com.databricks.sdk.service.jobs.ParamPairs` class. +* Changed `pipelineId` field for `com.databricks.sdk.service.jobs.PipelineTask` to be required. +* Changed `entryPoint` and `packageName` fields for `com.databricks.sdk.service.jobs.PythonWheelTask` to be required. +* Changed `jobParameters` field for `com.databricks.sdk.service.jobs.RepairRun` to Map class. +* Changed `baseParameters` field for `com.databricks.sdk.service.jobs.ResolvedNotebookTaskValues` to Map class. +* Changed `parameters` field for `com.databricks.sdk.service.jobs.ResolvedParamPairValues` to Map class. +* Changed `namedParameters` field for `com.databricks.sdk.service.jobs.ResolvedPythonWheelTaskValues` to Map class. +* Removed `namedParameters` field for `com.databricks.sdk.service.jobs.ResolvedRunJobTaskValues`. 
+* Changed `parameters` field for `com.databricks.sdk.service.jobs.ResolvedRunJobTaskValues` to Map class. +* Added `jobParameters` field for `com.databricks.sdk.service.jobs.ResolvedRunJobTaskValues`. +* Added `description` and `queueDuration` fields for `com.databricks.sdk.service.jobs.Run`. +* Changed `op` field for `com.databricks.sdk.service.jobs.RunConditionTask` to `com.databricks.sdk.service.jobs.ConditionTaskOp` class. +* Removed `com.databricks.sdk.service.jobs.RunConditionTaskOp` class. +* Changed `inputs` and `task` field for `com.databricks.sdk.service.jobs.RunForEachTask` to be required. +* Changed `jobParameters` field for `com.databricks.sdk.service.jobs.RunJobTask` to Map class. +* Added `dbtCommands`, `jarParams`, `notebookParams`, `pipelineParams`, `pythonNamedParams`, `pythonParams`, `sparkSubmitParams` and `sqlParams` fields for `com.databricks.sdk.service.jobs.RunJobTask`. +* Changed `jobParameters` field for `com.databricks.sdk.service.jobs.RunNow` to Map class. +* Added `info` field for `com.databricks.sdk.service.jobs.RunOutput`. +* Removed `jobParameters` field for `com.databricks.sdk.service.jobs.RunParameters`. +* Changed `taskKey` field for `com.databricks.sdk.service.jobs.RunTask` to be required. +* Added `computeKey`, `emailNotifications`, `jobClusterKey`, `notificationSettings`, `runDuration`, `runPageUrl`, `timeoutSeconds` and `webhookNotifications` fields for `com.databricks.sdk.service.jobs.RunTask`. +* Added `endpointId` field for `com.databricks.sdk.service.jobs.SqlQueryOutput`. +* Added `conditionTask`, `dbtTask`, `notebookTask`, `pipelineTask`, `pythonWheelTask`, `runJobTask`, `sparkJarTask`, `sparkPythonTask`, `sparkSubmitTask` and `sqlTask` fields for `com.databricks.sdk.service.jobs.SubmitRun`. +* Added `description` field for `com.databricks.sdk.service.jobs.SubmitTask`. +* Added `disableAutoOptimization` field for `com.databricks.sdk.service.jobs.Task`. 
+* Added `noAlertForSkippedRuns` field for `com.databricks.sdk.service.jobs.TaskEmailNotifications`. +* Added `tableUpdate` field for `com.databricks.sdk.service.jobs.TriggerSettings`. +* Changed `id` field for `com.databricks.sdk.service.jobs.Webhook` to be required. +* Changed `onDurationWarningThresholdExceeded` field for `com.databricks.sdk.service.jobs.WebhookNotifications` to `com.databricks.sdk.service.jobs.WebhookList` class. +* Removed `com.databricks.sdk.service.jobs.WebhookNotificationsOnDurationWarningThresholdExceededItem` class. +* Added `com.databricks.sdk.service.jobs.JobEditMode` class. +* Replaced `com.databricks.sdk.service.serving.AwsBedrockConfig` class by `com.databricks.sdk.service.serving.AmazonBedrockConfig` class. +* Replaced `com.databricks.sdk.service.serving.AwsBedrockConfigBedrockProvider` class by `com.databricks.sdk.service.serving.AmazonBedrockConfigBedrockProvider` class. +* Replaced `awsBedrockConfig` field for `com.databricks.sdk.service.serving.ExternalModel` by `amazonBedrockConfig`. +* Changed `get()` method for `workspaceClient.ipAccessLists()` service . New request type is `com.databricks.sdk.service.settings.GetIpAccessListRequest` class. +* Replaced `com.databricks.sdk.service.settings.GetIpAccessList` class by `com.databricks.sdk.service.settings.GetIpAccessListRequest` class. + +OpenAPI SHA: d38528c3e47dd81c9bdbd918272a3e49d36e09ce, Date: 2024-03-27 + + +## 0.21.0 + + +API Changes: + + * Changed `list()` method for `workspaceClient.catalogs()` service to require request of `com.databricks.sdk.service.catalog.ListCatalogsRequest` class. + * Changed `create()` method for `workspaceClient.onlineTables()` service . New request type is `com.databricks.sdk.service.catalog.CreateOnlineTableRequest` class. + * Removed `com.databricks.sdk.service.catalog.AwsIamRole` class. 
+ * Changed `notifications` field for `com.databricks.sdk.service.catalog.CreateMonitor` to `com.databricks.sdk.service.catalog.MonitorNotificationsConfig` class. + * Changed `awsIamRole` field for `com.databricks.sdk.service.catalog.CreateStorageCredential` to `com.databricks.sdk.service.catalog.AwsIamRoleRequest` class. + * Added `browseOnly` field for `com.databricks.sdk.service.catalog.ExternalLocationInfo`. + * Added `browseOnly` field for `com.databricks.sdk.service.catalog.FunctionInfo`. + * Added `includeBrowse` field for `com.databricks.sdk.service.catalog.GetCatalogRequest`. + * Added `includeBrowse` field for `com.databricks.sdk.service.catalog.GetExternalLocationRequest`. + * Added `includeBrowse` field for `com.databricks.sdk.service.catalog.GetFunctionRequest`. + * Added `includeBrowse` field for `com.databricks.sdk.service.catalog.GetModelVersionRequest`. + * Added `includeBrowse` field for `com.databricks.sdk.service.catalog.GetRegisteredModelRequest`. + * Added `includeBrowse` field for `com.databricks.sdk.service.catalog.GetSchemaRequest`. + * Added `includeBrowse` field for `com.databricks.sdk.service.catalog.GetTableRequest`. + * Added `includeBrowse` field for `com.databricks.sdk.service.catalog.ListExternalLocationsRequest`. + * Added `includeBrowse` field for `com.databricks.sdk.service.catalog.ListFunctionsRequest`. + * Added `includeBrowse` field for `com.databricks.sdk.service.catalog.ListModelVersionsRequest`. + * Added `includeBrowse` field for `com.databricks.sdk.service.catalog.ListRegisteredModelsRequest`. + * Added `includeBrowse` field for `com.databricks.sdk.service.catalog.ListSchemasRequest`. + * Added `includeBrowse` field for `com.databricks.sdk.service.catalog.ListTablesRequest`. + * Added `includeBrowse` field for `com.databricks.sdk.service.catalog.ListVolumesRequest`. + * Added `browseOnly` field for `com.databricks.sdk.service.catalog.ModelVersionInfo`. 
+ * Changed `notifications` field for `com.databricks.sdk.service.catalog.MonitorInfo` to `com.databricks.sdk.service.catalog.MonitorNotificationsConfig` class. + * Added `includeBrowse` field for `com.databricks.sdk.service.catalog.ReadVolumeRequest`. + * Added `browseOnly` field for `com.databricks.sdk.service.catalog.RegisteredModelInfo`. + * Added `browseOnly` field for `com.databricks.sdk.service.catalog.SchemaInfo`. + * Changed `awsIamRole` field for `com.databricks.sdk.service.catalog.StorageCredentialInfo` to `com.databricks.sdk.service.catalog.AwsIamRoleResponse` class. + * Added `browseOnly` field for `com.databricks.sdk.service.catalog.TableInfo`. + * Changed `notifications` field for `com.databricks.sdk.service.catalog.UpdateMonitor` to `com.databricks.sdk.service.catalog.MonitorNotificationsConfig` class. + * Changed `awsIamRole` field for `com.databricks.sdk.service.catalog.UpdateStorageCredential` to `com.databricks.sdk.service.catalog.AwsIamRoleRequest` class. + * Changed `awsIamRole` field for `com.databricks.sdk.service.catalog.ValidateStorageCredential` to `com.databricks.sdk.service.catalog.AwsIamRoleRequest` class. + * Removed `com.databricks.sdk.service.catalog.ViewData` class. + * Added `browseOnly` field for `com.databricks.sdk.service.catalog.VolumeInfo`. + * Added `com.databricks.sdk.service.catalog.AwsIamRoleRequest` class. + * Added `com.databricks.sdk.service.catalog.AwsIamRoleResponse` class. + * Added `com.databricks.sdk.service.catalog.CreateOnlineTableRequest` class. + * Added `com.databricks.sdk.service.catalog.ListCatalogsRequest` class. + * Changed `publish()` method for `workspaceClient.lakeview()` service to return `com.databricks.sdk.service.dashboards.PublishedDashboard` class. + * Added `create()` method for `workspaceClient.lakeview()` service. + * Added `get()` method for `workspaceClient.lakeview()` service. + * Added `getPublished()` method for `workspaceClient.lakeview()` service. 
+ * Added `trash()` method for `workspaceClient.lakeview()` service. + * Added `update()` method for `workspaceClient.lakeview()` service. + * Removed `Object` class. + * Added `com.databricks.sdk.service.dashboards.CreateDashboardRequest` class. + * Added `com.databricks.sdk.service.dashboards.Dashboard` class. + * Added `com.databricks.sdk.service.dashboards.GetDashboardRequest` class. + * Added `com.databricks.sdk.service.dashboards.GetPublishedDashboardRequest` class. + * Added `com.databricks.sdk.service.dashboards.LifecycleState` class. + * Added `com.databricks.sdk.service.dashboards.PublishedDashboard` class. + * Added `com.databricks.sdk.service.dashboards.TrashDashboardRequest` class. + * Added `Object` class. + * Added `com.databricks.sdk.service.dashboards.UpdateDashboardRequest` class. + * Added `autoCaptureConfig` field for `com.databricks.sdk.service.serving.EndpointPendingConfig`. + * Changed `get()` method for `workspaceClient.automaticClusterUpdate()` service . New request type is `com.databricks.sdk.service.settings.GetAutomaticClusterUpdateSettingRequest` class. + * Changed `get()` method for `workspaceClient.cspEnablement()` service . New request type is `com.databricks.sdk.service.settings.GetCspEnablementSettingRequest` class. + * Changed `get()` method for `accountClient.cspEnablementAccount()` service . New request type is `com.databricks.sdk.service.settings.GetCspEnablementAccountSettingRequest` class. + * Changed `delete()` method for `workspaceClient.defaultNamespace()` service . New request type is `com.databricks.sdk.service.settings.DeleteDefaultNamespaceSettingRequest` class. + * Changed `get()` method for `workspaceClient.defaultNamespace()` service . New request type is `com.databricks.sdk.service.settings.GetDefaultNamespaceSettingRequest` class. + * Changed `get()` method for `workspaceClient.esmEnablement()` service . New request type is `com.databricks.sdk.service.settings.GetEsmEnablementSettingRequest` class. 
+ * Changed `get()` method for `accountClient.esmEnablementAccount()` service . New request type is `com.databricks.sdk.service.settings.GetEsmEnablementAccountSettingRequest` class. + * Changed `get()` method for `workspaceClient.ipAccessLists()` service . New request type is `com.databricks.sdk.service.settings.GetIpAccessList` class. + * Changed `delete()` method for `accountClient.personalCompute()` service . New request type is `com.databricks.sdk.service.settings.DeletePersonalComputeSettingRequest` class. + * Changed `get()` method for `accountClient.personalCompute()` service . New request type is `com.databricks.sdk.service.settings.GetPersonalComputeSettingRequest` class. + * Changed `delete()` method for `workspaceClient.restrictWorkspaceAdmins()` service . New request type is `com.databricks.sdk.service.settings.DeleteRestrictWorkspaceAdminsSettingRequest` class. + * Changed `get()` method for `workspaceClient.restrictWorkspaceAdmins()` service . New request type is `com.databricks.sdk.service.settings.GetRestrictWorkspaceAdminsSettingRequest` class. + * Removed `com.databricks.sdk.service.settings.DeleteDefaultNamespaceRequest` class. + * Removed `com.databricks.sdk.service.settings.DeletePersonalComputeRequest` class. + * Removed `com.databricks.sdk.service.settings.DeleteRestrictWorkspaceAdminRequest` class. + * Removed `com.databricks.sdk.service.settings.GetAutomaticClusterUpdateRequest` class. + * Removed `com.databricks.sdk.service.settings.GetCspEnablementAccountRequest` class. + * Removed `com.databricks.sdk.service.settings.GetCspEnablementRequest` class. + * Removed `com.databricks.sdk.service.settings.GetDefaultNamespaceRequest` class. + * Removed `com.databricks.sdk.service.settings.GetEsmEnablementAccountRequest` class. + * Removed `com.databricks.sdk.service.settings.GetEsmEnablementRequest` class. + * Removed `com.databricks.sdk.service.settings.GetIpAccessListRequest` class. 
+ * Removed `com.databricks.sdk.service.settings.GetPersonalComputeRequest` class. + * Removed `com.databricks.sdk.service.settings.GetRestrictWorkspaceAdminRequest` class. + * Added `com.databricks.sdk.service.settings.DeleteDefaultNamespaceSettingRequest` class. + * Added `com.databricks.sdk.service.settings.DeletePersonalComputeSettingRequest` class. + * Added `com.databricks.sdk.service.settings.DeleteRestrictWorkspaceAdminsSettingRequest` class. + * Added `com.databricks.sdk.service.settings.GetAutomaticClusterUpdateSettingRequest` class. + * Added `com.databricks.sdk.service.settings.GetCspEnablementAccountSettingRequest` class. + * Added `com.databricks.sdk.service.settings.GetCspEnablementSettingRequest` class. + * Added `com.databricks.sdk.service.settings.GetDefaultNamespaceSettingRequest` class. + * Added `com.databricks.sdk.service.settings.GetEsmEnablementAccountSettingRequest` class. + * Added `com.databricks.sdk.service.settings.GetEsmEnablementSettingRequest` class. + * Added `com.databricks.sdk.service.settings.GetIpAccessList` class. + * Added `com.databricks.sdk.service.settings.GetPersonalComputeSettingRequest` class. + * Added `com.databricks.sdk.service.settings.GetRestrictWorkspaceAdminsSettingRequest` class. + * Changed `dataObjectType` field for `com.databricks.sdk.service.sharing.SharedDataObject` to `com.databricks.sdk.service.sharing.SharedDataObjectDataObjectType` class. + * Added `content` field for `com.databricks.sdk.service.sharing.SharedDataObject`. + * Added `com.databricks.sdk.service.sharing.SharedDataObjectDataObjectType` class. + * Added `embeddingSourceColumns` field for `com.databricks.sdk.service.vectorsearch.DirectAccessVectorIndexSpec`. + * Added `scoreThreshold` field for `com.databricks.sdk.service.vectorsearch.QueryVectorIndexRequest`. 
+ +OpenAPI SHA: 93763b0d7ae908520c229c786fff28b8fd623261, Date: 2024-03-20 + + +## 0.20.0 + +### Features and Improvements + * Added basic support for HTTP proxies ([#241](https://github.com/databricks/databricks-sdk-java/pull/241)). + * Fixed getWorkspaceClient() for GCP ([#224](https://github.com/databricks/databricks-sdk-java/pull/224)). + * Note: Backwards incompatible changes - Settings are now nested, please see the API changes below. + +### Internal Changes + * Reading headers should be done in a case-insensitive manner ([#235](https://github.com/databricks/databricks-sdk-java/pull/235)). + * Added integration tests for the Files API ([#236](https://github.com/databricks/databricks-sdk-java/pull/236)). + * Supported subservices ([#237](https://github.com/databricks/databricks-sdk-java/pull/237)). + * Handled empty types in the Java SDK ([#239](https://github.com/databricks/databricks-sdk-java/pull/239)). + * Added tokei.rs lines of code badge ([#243](https://github.com/databricks/databricks-sdk-java/pull/243)). + * Updated SDK to latest OpenAPI spec ([#245](https://github.com/databricks/databricks-sdk-java/pull/245)). 
+ +### API Changes: + * Added the following services: + - `workspaceClient.permissionMigration()` + - `workspaceClient.automaticClusterUpdate()` + - `workspaceClient.cspEnablement()` + - `accountClient.cspEnablementAccount()` + - `workspaceClient.defaultNamespace()` + - `workspaceClient.esmEnablement()` + - `accountClient.esmEnablementAccount()` + - `accountClient.personalCompute()` + - `workspaceClient.restrictWorkspaceAdmins()` + * Added the following classes: + - `com.databricks.sdk.service.iam.PermissionMigrationRequest` + - `com.databricks.sdk.service.iam.PermissionMigrationResponse` + - `com.databricks.sdk.service.settings.AutomaticClusterUpdateSetting` + - `com.databricks.sdk.service.settings.ClusterAutoRestartMessage` + - `com.databricks.sdk.service.settings.ClusterAutoRestartMessageEnablementDetails` + - `com.databricks.sdk.service.settings.ClusterAutoRestartMessageMaintenanceWindow` + - `com.databricks.sdk.service.settings.ClusterAutoRestartMessageMaintenanceWindowDayOfWeek` + - `com.databricks.sdk.service.settings.ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule` + - `com.databricks.sdk.service.settings.ClusterAutoRestartMessageMaintenanceWindowWeekDayFrequency` + - `com.databricks.sdk.service.settings.ClusterAutoRestartMessageMaintenanceWindowWindowStartTime` + - `com.databricks.sdk.service.settings.ComplianceStandard` + - `com.databricks.sdk.service.settings.CspEnablement` + - `com.databricks.sdk.service.settings.CspEnablementAccount` + - `com.databricks.sdk.service.settings.CspEnablementAccountSetting` + - `com.databricks.sdk.service.settings.CspEnablementSetting` + - `com.databricks.sdk.service.settings.DeleteDefaultNamespaceRequest` + - `com.databricks.sdk.service.settings.DeletePersonalComputeRequest` + - `com.databricks.sdk.service.settings.DeleteRestrictWorkspaceAdminRequest` + - `com.databricks.sdk.service.settings.EsmEnablement` + - `com.databricks.sdk.service.settings.EsmEnablementAccount` + - 
`com.databricks.sdk.service.settings.EsmEnablementAccountSetting` + - `com.databricks.sdk.service.settings.EsmEnablementSetting` + - `com.databricks.sdk.service.settings.GetAutomaticClusterUpdateRequest` + - `com.databricks.sdk.service.settings.GetCspEnablementAccountRequest` + - `com.databricks.sdk.service.settings.GetCspEnablementRequest` + - `com.databricks.sdk.service.settings.GetDefaultNamespaceRequest` + - `com.databricks.sdk.service.settings.GetEsmEnablementAccountRequest` + - `com.databricks.sdk.service.settings.GetEsmEnablementRequest` + - `com.databricks.sdk.service.settings.GetPersonalComputeRequest` + - `com.databricks.sdk.service.settings.GetRestrictWorkspaceAdminRequest` + - `com.databricks.sdk.service.settings.NccAwsStableIpRule` + - `com.databricks.sdk.service.settings.UpdateAutomaticClusterUpdateSettingRequest` + - `com.databricks.sdk.service.settings.UpdateCspEnablementAccountSettingRequest` + - `com.databricks.sdk.service.settings.UpdateCspEnablementSettingRequest` + - `com.databricks.sdk.service.settings.UpdateEsmEnablementAccountSettingRequest` + - `com.databricks.sdk.service.settings.UpdateEsmEnablementSettingRequest` + * Removed the following classes: + - `com.databricks.sdk.service.settings.DeleteDefaultNamespaceSettingRequest` + - `com.databricks.sdk.service.settings.DeletePersonalComputeSettingRequest` + - `com.databricks.sdk.service.settings.DeleteRestrictWorkspaceAdminsSettingRequest` + - `com.databricks.sdk.service.settings.GetDefaultNamespaceSettingRequest` + - `com.databricks.sdk.service.settings.GetPersonalComputeSettingRequest` + - `com.databricks.sdk.service.settings.GetRestrictWorkspaceAdminsSettingRequest` + * Changed `version` field for `com.databricks.sdk.service.serving.AppManifest` to `com.databricks.sdk.service.serving.AnyValue` class. + * Removed `deletePersonalComputeSetting()`, `getPersonalComputeSetting()` and `updatePersonalComputeSetting()` method for `accountClient.settings()` service. 
+ * Removed `deleteDefaultNamespaceSetting()`, `deleteRestrictWorkspaceAdminsSetting()`, `getDefaultNamespaceSetting()`, `getRestrictWorkspaceAdminsSetting()`, `updateDefaultNamespaceSetting()` and `updateRestrictWorkspaceAdminsSetting()` method for `workspaceClient.settings()` service. + * Added `awsStableIpRule` field for `com.databricks.sdk.service.settings.NccEgressDefaultRules`. + * Added `indexName` field for `com.databricks.sdk.service.vectorsearch.DeleteDataVectorIndexRequest`. + * Added `embeddingModelEndpointName` field for `com.databricks.sdk.service.vectorsearch.EmbeddingSourceColumn`. + * Added `indexName` field for `com.databricks.sdk.service.vectorsearch.UpsertDataVectorIndexRequest`. + * Added `deltaSyncIndexSpec` field for `com.databricks.sdk.service.vectorsearch.VectorIndex`. + * Added `directAccessIndexSpec` field for `com.databricks.sdk.service.vectorsearch.VectorIndex`. + * Changed `deleteEndpoint()`, `createIndex()`, `deleteDataVectorIndex()` and `upsertDataVectorIndex()` method for `workspaceClient.vectorSearchEndpoints()` service with new required argument order. + * Changed `endpointName` field for `com.databricks.sdk.service.vectorsearch.CreateVectorIndexRequest` to be required. + * Removed `planningPhases` field for `com.databricks.sdk.service.sql.QueryMetrics`. + * Removed `name` field for `com.databricks.sdk.service.vectorsearch.DeleteDataVectorIndexRequest`. + * Removed `name` field for `com.databricks.sdk.service.vectorsearch.DeleteEndpointRequest`. + * Removed `com.databricks.sdk.service.vectorsearch.EmbeddingConfig` class. + * Removed `embeddingConfig` field for `com.databricks.sdk.service.vectorsearch.EmbeddingSourceColumn`. + * Removed `name` field for `com.databricks.sdk.service.vectorsearch.UpsertDataVectorIndexRequest`. + * Removed `deltaSyncVectorIndexSpec` field for `com.databricks.sdk.service.vectorsearch.VectorIndex`. + * Removed `directAccessVectorIndexSpec` field for `com.databricks.sdk.service.vectorsearch.VectorIndex`. 
+ +OpenAPI SHA: d855b30f25a06fe84f25214efa20e7f1fffcdf9e, Date: 2024-03-04 + + ## 0.19.0 Internal Changes: diff --git a/README.md b/README.md index d4534aaf6..4ecf5586b 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,7 @@ # Databricks SDK for Java +[![lines of code](https://tokei.rs/b1/github/databricks/databricks-sdk-java)](https://github.com/databricks/databricks-sdk-java) + **Stability**: [Beta](https://docs.databricks.com/release-notes/release-types.html) The Databricks SDK for Java includes functionality to accelerate development with Java for the Databricks Lakehouse. It covers all public [Databricks REST API](https://docs.databricks.com/dev-tools/api/index.html) operations. The SDK's internal HTTP client is robust and handles failures on different levels by performing intelligent retries. @@ -12,6 +14,7 @@ The Databricks SDK for Java includes functionality to accelerate development wit - [Long-running operations](#long-running-operations) - [Paginated responses](#paginated-responses) - [Single-sign-on with OAuth](#single-sign-on-sso-with-oauth) +- [Error handling](#error-handling) - [Logging](#logging) - [Interface stability](#interface-stability) - [Disclaimer](#disclaimer) @@ -355,6 +358,30 @@ For applications, that do run on developer workstations, Databricks SDK for Java In order to use OAuth with Databricks SDK for Python, you should use `AccountClient.customAppIntegration().create()` API. Usage of this can be seen in the [Spring Boot example project](/examples/spring-boot-oauth-u2m-demo/src/main/java/com/databricks/sdk/App.java). +## Error Handling +The Databricks SDK for Java provides a robust error-handling mechanism that allows developers to catch and handle API errors. When an error occurs, the SDK will raise an exception that contains information about the error, such as the HTTP status code, error message, and error details. 
Developers can catch these exceptions and handle them appropriately in their code. + +```java +import com.databricks.sdk.WorkspaceClient; +import com.databricks.sdk.core.error.platform.ResourceDoesNotExist; +import com.databricks.sdk.service.compute.ClusterDetails; + +public class ErrorDemo { + public static void main(String[] args) { + WorkspaceClient w = new WorkspaceClient(); + try { + ClusterDetails c = w.clusters().get("1234-5678-9012"); + } catch (ResourceDoesNotExist e) { + System.out.println("Cluster not found: " + e.getMessage()); + } + } +} +``` + +The SDK handles inconsistencies in error responses amongst the different services, providing a consistent interface for developers to work with. Simply catch the appropriate exception type and handle the error as needed. The errors returned by the Databricks API are defined in [databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform](https://github.com/databricks/databricks-sdk-java/tree/main/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform). + + + ## Logging The Databricks SDK for Java seamlessly integrates with the standard [SLF4J logging framework](https://www.slf4j.org/). This allows developers to easily enable and customize logging for their Databricks Java projects. 
To enable debug logging in your Databricks java project, you can add the following to your log4j.properties file: diff --git a/databricks-sdk-java/pom.xml b/databricks-sdk-java/pom.xml index ef930f892..f22a783e6 100644 --- a/databricks-sdk-java/pom.xml +++ b/databricks-sdk-java/pom.xml @@ -5,7 +5,7 @@ com.databricks databricks-sdk-parent - 0.19.0 + 0.23.0 databricks-sdk-java diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java index 550f96c30..beaa7d122 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java @@ -309,14 +309,7 @@ public AccountMetastoresAPI metastores() { /** * These APIs provide configurations for the network connectivity of your workspaces for - * serverless compute resources. This API provides stable subnets for your workspace so that you - * can configure your firewalls on your Azure Storage accounts to allow access from Databricks. - * You can also use the API to provision private endpoints for Databricks to privately connect - * serverless compute resources to your Azure resources using Azure Private Link. See [configure - * serverless secure connectivity]. - * - *

[configure serverless secure connectivity]: - * https://learn.microsoft.com/azure/databricks/security/network/serverless-network-security + * serverless compute resources. */ public NetworkConnectivityAPI networkConnectivity() { return networkConnectivityAPI; @@ -384,16 +377,7 @@ public AccountServicePrincipalsAPI servicePrincipals() { return servicePrincipalsAPI; } - /** - * The Personal Compute enablement setting lets you control which users can use the Personal - * Compute default policy to create compute resources. By default all users in all workspaces have - * access (ON), but you can change the setting to instead let individual workspaces configure - * access control (DELEGATE). - * - *

There is only one instance of this setting per account. Since this setting has a default - * value, this setting is present on all accounts even though it's never set on a given account. - * Deletion reverts the value of the setting back to the default value. - */ + /** Accounts Settings API allows users to manage settings at the account level. */ public AccountSettingsAPI settings() { return settingsAPI; } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java index 764ffd2f3..0f7b1ce1b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java @@ -70,6 +70,8 @@ import com.databricks.sdk.service.iam.CurrentUserService; import com.databricks.sdk.service.iam.GroupsAPI; import com.databricks.sdk.service.iam.GroupsService; +import com.databricks.sdk.service.iam.PermissionMigrationAPI; +import com.databricks.sdk.service.iam.PermissionMigrationService; import com.databricks.sdk.service.iam.PermissionsAPI; import com.databricks.sdk.service.iam.PermissionsService; import com.databricks.sdk.service.iam.ServicePrincipalsAPI; @@ -78,6 +80,30 @@ import com.databricks.sdk.service.iam.UsersService; import com.databricks.sdk.service.jobs.JobsAPI; import com.databricks.sdk.service.jobs.JobsService; +import com.databricks.sdk.service.marketplace.ConsumerFulfillmentsAPI; +import com.databricks.sdk.service.marketplace.ConsumerFulfillmentsService; +import com.databricks.sdk.service.marketplace.ConsumerInstallationsAPI; +import com.databricks.sdk.service.marketplace.ConsumerInstallationsService; +import com.databricks.sdk.service.marketplace.ConsumerListingsAPI; +import com.databricks.sdk.service.marketplace.ConsumerListingsService; +import com.databricks.sdk.service.marketplace.ConsumerPersonalizationRequestsAPI; +import 
com.databricks.sdk.service.marketplace.ConsumerPersonalizationRequestsService; +import com.databricks.sdk.service.marketplace.ConsumerProvidersAPI; +import com.databricks.sdk.service.marketplace.ConsumerProvidersService; +import com.databricks.sdk.service.marketplace.ProviderExchangeFiltersAPI; +import com.databricks.sdk.service.marketplace.ProviderExchangeFiltersService; +import com.databricks.sdk.service.marketplace.ProviderExchangesAPI; +import com.databricks.sdk.service.marketplace.ProviderExchangesService; +import com.databricks.sdk.service.marketplace.ProviderFilesAPI; +import com.databricks.sdk.service.marketplace.ProviderFilesService; +import com.databricks.sdk.service.marketplace.ProviderListingsAPI; +import com.databricks.sdk.service.marketplace.ProviderListingsService; +import com.databricks.sdk.service.marketplace.ProviderPersonalizationRequestsAPI; +import com.databricks.sdk.service.marketplace.ProviderPersonalizationRequestsService; +import com.databricks.sdk.service.marketplace.ProviderProviderAnalyticsDashboardsAPI; +import com.databricks.sdk.service.marketplace.ProviderProviderAnalyticsDashboardsService; +import com.databricks.sdk.service.marketplace.ProviderProvidersAPI; +import com.databricks.sdk.service.marketplace.ProviderProvidersService; import com.databricks.sdk.service.ml.ExperimentsAPI; import com.databricks.sdk.service.ml.ExperimentsService; import com.databricks.sdk.service.ml.ModelRegistryAPI; @@ -159,6 +185,11 @@ public class WorkspaceClient { private ClustersExt clustersAPI; private CommandExecutionAPI commandExecutionAPI; private ConnectionsAPI connectionsAPI; + private ConsumerFulfillmentsAPI consumerFulfillmentsAPI; + private ConsumerInstallationsAPI consumerInstallationsAPI; + private ConsumerListingsAPI consumerListingsAPI; + private ConsumerPersonalizationRequestsAPI consumerPersonalizationRequestsAPI; + private ConsumerProvidersAPI consumerProvidersAPI; private CredentialsManagerAPI credentialsManagerAPI; private CurrentUserAPI 
currentUserAPI; private DashboardWidgetsAPI dashboardWidgetsAPI; @@ -185,9 +216,17 @@ public class WorkspaceClient { private ModelRegistryAPI modelRegistryAPI; private ModelVersionsAPI modelVersionsAPI; private OnlineTablesAPI onlineTablesAPI; + private PermissionMigrationAPI permissionMigrationAPI; private PermissionsAPI permissionsAPI; private PipelinesAPI pipelinesAPI; private PolicyFamiliesAPI policyFamiliesAPI; + private ProviderExchangeFiltersAPI providerExchangeFiltersAPI; + private ProviderExchangesAPI providerExchangesAPI; + private ProviderFilesAPI providerFilesAPI; + private ProviderListingsAPI providerListingsAPI; + private ProviderPersonalizationRequestsAPI providerPersonalizationRequestsAPI; + private ProviderProviderAnalyticsDashboardsAPI providerProviderAnalyticsDashboardsAPI; + private ProviderProvidersAPI providerProvidersAPI; private ProvidersAPI providersAPI; private QueriesAPI queriesAPI; private QueryHistoryAPI queryHistoryAPI; @@ -236,6 +275,11 @@ public WorkspaceClient(DatabricksConfig config) { clustersAPI = new ClustersExt(apiClient); commandExecutionAPI = new CommandExecutionAPI(apiClient); connectionsAPI = new ConnectionsAPI(apiClient); + consumerFulfillmentsAPI = new ConsumerFulfillmentsAPI(apiClient); + consumerInstallationsAPI = new ConsumerInstallationsAPI(apiClient); + consumerListingsAPI = new ConsumerListingsAPI(apiClient); + consumerPersonalizationRequestsAPI = new ConsumerPersonalizationRequestsAPI(apiClient); + consumerProvidersAPI = new ConsumerProvidersAPI(apiClient); credentialsManagerAPI = new CredentialsManagerAPI(apiClient); currentUserAPI = new CurrentUserAPI(apiClient); dashboardWidgetsAPI = new DashboardWidgetsAPI(apiClient); @@ -262,9 +306,17 @@ public WorkspaceClient(DatabricksConfig config) { modelRegistryAPI = new ModelRegistryAPI(apiClient); modelVersionsAPI = new ModelVersionsAPI(apiClient); onlineTablesAPI = new OnlineTablesAPI(apiClient); + permissionMigrationAPI = new PermissionMigrationAPI(apiClient); 
permissionsAPI = new PermissionsAPI(apiClient); pipelinesAPI = new PipelinesAPI(apiClient); policyFamiliesAPI = new PolicyFamiliesAPI(apiClient); + providerExchangeFiltersAPI = new ProviderExchangeFiltersAPI(apiClient); + providerExchangesAPI = new ProviderExchangesAPI(apiClient); + providerFilesAPI = new ProviderFilesAPI(apiClient); + providerListingsAPI = new ProviderListingsAPI(apiClient); + providerPersonalizationRequestsAPI = new ProviderPersonalizationRequestsAPI(apiClient); + providerProviderAnalyticsDashboardsAPI = new ProviderProviderAnalyticsDashboardsAPI(apiClient); + providerProvidersAPI = new ProviderProvidersAPI(apiClient); providersAPI = new ProvidersAPI(apiClient); queriesAPI = new QueriesAPI(apiClient); queryHistoryAPI = new QueryHistoryAPI(apiClient); @@ -443,6 +495,40 @@ public ConnectionsAPI connections() { return connectionsAPI; } + /** Fulfillments are entities that allow consumers to preview installations. */ + public ConsumerFulfillmentsAPI consumerFulfillments() { + return consumerFulfillmentsAPI; + } + + /** + * Installations are entities that allow consumers to interact with Databricks Marketplace + * listings. + */ + public ConsumerInstallationsAPI consumerInstallations() { + return consumerInstallationsAPI; + } + + /** + * Listings are the core entities in the Marketplace. They represent the products that are + * available for consumption. + */ + public ConsumerListingsAPI consumerListings() { + return consumerListingsAPI; + } + + /** + * Personalization Requests allow customers to interact with the individualized Marketplace + * listing flow. + */ + public ConsumerPersonalizationRequestsAPI consumerPersonalizationRequests() { + return consumerPersonalizationRequestsAPI; + } + + /** Providers are the entities that publish listings to the Marketplace. 
*/ + public ConsumerProvidersAPI consumerProviders() { + return consumerProvidersAPI; + } + /** * Credentials manager interacts with with Identity Providers to to perform token exchanges using * stored credentials and refresh tokens. @@ -741,17 +827,13 @@ public LakeviewAPI lakeview() { * *

To make third-party or custom code available to notebooks and jobs running on your clusters, * you can install a library. Libraries can be written in Python, Java, Scala, and R. You can - * upload Java, Scala, and Python libraries and point to external packages in PyPI, Maven, and + * upload Python, Java, Scala and R libraries and point to external packages in PyPI, Maven, and * CRAN repositories. * *

Cluster libraries can be used by all notebooks running on a cluster. You can install a * cluster library directly from a public repository such as PyPI or Maven, using a previously * installed workspace library, or using an init script. * - *

When you install a library on a cluster, a notebook already attached to that cluster will - * not immediately see the new library. You must first detach and then reattach the notebook to - * the cluster. - * *

When you uninstall a library from a cluster, the library is removed only when you restart * the cluster. Until you restart the cluster, the status of the uninstalled library appears as * Uninstall pending restart. @@ -809,6 +891,14 @@ public OnlineTablesAPI onlineTables() { return onlineTablesAPI; } + /** + * This spec contains undocumented permission migration APIs used in + * https://github.com/databrickslabs/ucx. + */ + public PermissionMigrationAPI permissionMigration() { + return permissionMigrationAPI; + } + /** * Permissions API are used to create read, write, edit, update and manage access for various * users on different objects and endpoints. @@ -854,6 +944,9 @@ public OnlineTablesAPI onlineTables() { *

For the mapping of the required permissions for specific actions or abilities and other * important information, see [Access Control]. * + *

Note that to manage access control on service principals, use **[Account Access Control + * Proxy](:service:accountaccesscontrolproxy)**. + * *

[Access Control]: https://docs.databricks.com/security/auth-authz/access-control/index.html */ public PermissionsAPI permissions() { @@ -894,6 +987,52 @@ public PolicyFamiliesAPI policyFamilies() { return policyFamiliesAPI; } + /** Marketplace exchanges filters curate which groups can access an exchange. */ + public ProviderExchangeFiltersAPI providerExchangeFilters() { + return providerExchangeFiltersAPI; + } + + /** + * Marketplace exchanges allow providers to share their listings with a curated set of customers. + */ + public ProviderExchangesAPI providerExchanges() { + return providerExchangesAPI; + } + + /** + * Marketplace offers a set of file APIs for various purposes such as preview notebooks and + * provider icons. + */ + public ProviderFilesAPI providerFiles() { + return providerFilesAPI; + } + + /** + * Listings are the core entities in the Marketplace. They represent the products that are + * available for consumption. + */ + public ProviderListingsAPI providerListings() { + return providerListingsAPI; + } + + /** + * Personalization requests are an alternate to instantly available listings. Control the + * lifecycle of personalized solutions. + */ + public ProviderPersonalizationRequestsAPI providerPersonalizationRequests() { + return providerPersonalizationRequestsAPI; + } + + /** Manage templated analytics solution for providers. */ + public ProviderProviderAnalyticsDashboardsAPI providerProviderAnalyticsDashboards() { + return providerProviderAnalyticsDashboardsAPI; + } + + /** Providers are entities that manage assets in Marketplace. */ + public ProviderProvidersAPI providerProviders() { + return providerProvidersAPI; + } + /** * A data provider is an object representing the organization in the real world who shares the * data. A provider contains shares which further contain the shared data. 
@@ -1057,18 +1196,7 @@ public ServingEndpointsAPI servingEndpoints() { return servingEndpointsAPI; } - /** - * The default namespace setting API allows users to configure the default namespace for a - * Databricks workspace. - * - *

Through this API, users can retrieve, set, or modify the default namespace used when queries - * do not reference a fully qualified three-level name. For example, if you use the API to set - * 'retail_prod' as the default catalog, then a query 'SELECT * FROM myTable' would reference the - * object 'retail_prod.default.myTable' (the schema 'default' is always assumed). - * - *

This setting requires a restart of clusters and SQL warehouses to take effect. Additionally, - * the default namespace only applies when using Unity Catalog-enabled compute. - */ + /** Workspace Settings API allows users to manage settings at the workspace level. */ public SettingsAPI settings() { return settingsAPI; } @@ -1457,6 +1585,67 @@ public WorkspaceClient withConnectionsAPI(ConnectionsAPI connections) { return this; } + /** Replace the default ConsumerFulfillmentsService with a custom implementation. */ + public WorkspaceClient withConsumerFulfillmentsImpl( + ConsumerFulfillmentsService consumerFulfillments) { + return this.withConsumerFulfillmentsAPI(new ConsumerFulfillmentsAPI(consumerFulfillments)); + } + + /** Replace the default ConsumerFulfillmentsAPI with a custom implementation. */ + public WorkspaceClient withConsumerFulfillmentsAPI(ConsumerFulfillmentsAPI consumerFulfillments) { + this.consumerFulfillmentsAPI = consumerFulfillments; + return this; + } + + /** Replace the default ConsumerInstallationsService with a custom implementation. */ + public WorkspaceClient withConsumerInstallationsImpl( + ConsumerInstallationsService consumerInstallations) { + return this.withConsumerInstallationsAPI(new ConsumerInstallationsAPI(consumerInstallations)); + } + + /** Replace the default ConsumerInstallationsAPI with a custom implementation. */ + public WorkspaceClient withConsumerInstallationsAPI( + ConsumerInstallationsAPI consumerInstallations) { + this.consumerInstallationsAPI = consumerInstallations; + return this; + } + + /** Replace the default ConsumerListingsService with a custom implementation. */ + public WorkspaceClient withConsumerListingsImpl(ConsumerListingsService consumerListings) { + return this.withConsumerListingsAPI(new ConsumerListingsAPI(consumerListings)); + } + + /** Replace the default ConsumerListingsAPI with a custom implementation. 
*/ + public WorkspaceClient withConsumerListingsAPI(ConsumerListingsAPI consumerListings) { + this.consumerListingsAPI = consumerListings; + return this; + } + + /** Replace the default ConsumerPersonalizationRequestsService with a custom implementation. */ + public WorkspaceClient withConsumerPersonalizationRequestsImpl( + ConsumerPersonalizationRequestsService consumerPersonalizationRequests) { + return this.withConsumerPersonalizationRequestsAPI( + new ConsumerPersonalizationRequestsAPI(consumerPersonalizationRequests)); + } + + /** Replace the default ConsumerPersonalizationRequestsAPI with a custom implementation. */ + public WorkspaceClient withConsumerPersonalizationRequestsAPI( + ConsumerPersonalizationRequestsAPI consumerPersonalizationRequests) { + this.consumerPersonalizationRequestsAPI = consumerPersonalizationRequests; + return this; + } + + /** Replace the default ConsumerProvidersService with a custom implementation. */ + public WorkspaceClient withConsumerProvidersImpl(ConsumerProvidersService consumerProviders) { + return this.withConsumerProvidersAPI(new ConsumerProvidersAPI(consumerProviders)); + } + + /** Replace the default ConsumerProvidersAPI with a custom implementation. */ + public WorkspaceClient withConsumerProvidersAPI(ConsumerProvidersAPI consumerProviders) { + this.consumerProvidersAPI = consumerProviders; + return this; + } + /** Replace the default CredentialsManagerService with a custom implementation. */ public WorkspaceClient withCredentialsManagerImpl(CredentialsManagerService credentialsManager) { return this.withCredentialsManagerAPI(new CredentialsManagerAPI(credentialsManager)); @@ -1743,6 +1932,18 @@ public WorkspaceClient withOnlineTablesAPI(OnlineTablesAPI onlineTables) { return this; } + /** Replace the default PermissionMigrationService with a custom implementation. 
*/ + public WorkspaceClient withPermissionMigrationImpl( + PermissionMigrationService permissionMigration) { + return this.withPermissionMigrationAPI(new PermissionMigrationAPI(permissionMigration)); + } + + /** Replace the default PermissionMigrationAPI with a custom implementation. */ + public WorkspaceClient withPermissionMigrationAPI(PermissionMigrationAPI permissionMigration) { + this.permissionMigrationAPI = permissionMigration; + return this; + } + /** Replace the default PermissionsService with a custom implementation. */ public WorkspaceClient withPermissionsImpl(PermissionsService permissions) { return this.withPermissionsAPI(new PermissionsAPI(permissions)); @@ -1776,6 +1977,94 @@ public WorkspaceClient withPolicyFamiliesAPI(PolicyFamiliesAPI policyFamilies) { return this; } + /** Replace the default ProviderExchangeFiltersService with a custom implementation. */ + public WorkspaceClient withProviderExchangeFiltersImpl( + ProviderExchangeFiltersService providerExchangeFilters) { + return this.withProviderExchangeFiltersAPI( + new ProviderExchangeFiltersAPI(providerExchangeFilters)); + } + + /** Replace the default ProviderExchangeFiltersAPI with a custom implementation. */ + public WorkspaceClient withProviderExchangeFiltersAPI( + ProviderExchangeFiltersAPI providerExchangeFilters) { + this.providerExchangeFiltersAPI = providerExchangeFilters; + return this; + } + + /** Replace the default ProviderExchangesService with a custom implementation. */ + public WorkspaceClient withProviderExchangesImpl(ProviderExchangesService providerExchanges) { + return this.withProviderExchangesAPI(new ProviderExchangesAPI(providerExchanges)); + } + + /** Replace the default ProviderExchangesAPI with a custom implementation. */ + public WorkspaceClient withProviderExchangesAPI(ProviderExchangesAPI providerExchanges) { + this.providerExchangesAPI = providerExchanges; + return this; + } + + /** Replace the default ProviderFilesService with a custom implementation. 
*/ + public WorkspaceClient withProviderFilesImpl(ProviderFilesService providerFiles) { + return this.withProviderFilesAPI(new ProviderFilesAPI(providerFiles)); + } + + /** Replace the default ProviderFilesAPI with a custom implementation. */ + public WorkspaceClient withProviderFilesAPI(ProviderFilesAPI providerFiles) { + this.providerFilesAPI = providerFiles; + return this; + } + + /** Replace the default ProviderListingsService with a custom implementation. */ + public WorkspaceClient withProviderListingsImpl(ProviderListingsService providerListings) { + return this.withProviderListingsAPI(new ProviderListingsAPI(providerListings)); + } + + /** Replace the default ProviderListingsAPI with a custom implementation. */ + public WorkspaceClient withProviderListingsAPI(ProviderListingsAPI providerListings) { + this.providerListingsAPI = providerListings; + return this; + } + + /** Replace the default ProviderPersonalizationRequestsService with a custom implementation. */ + public WorkspaceClient withProviderPersonalizationRequestsImpl( + ProviderPersonalizationRequestsService providerPersonalizationRequests) { + return this.withProviderPersonalizationRequestsAPI( + new ProviderPersonalizationRequestsAPI(providerPersonalizationRequests)); + } + + /** Replace the default ProviderPersonalizationRequestsAPI with a custom implementation. */ + public WorkspaceClient withProviderPersonalizationRequestsAPI( + ProviderPersonalizationRequestsAPI providerPersonalizationRequests) { + this.providerPersonalizationRequestsAPI = providerPersonalizationRequests; + return this; + } + + /** + * Replace the default ProviderProviderAnalyticsDashboardsService with a custom implementation. 
+ */ + public WorkspaceClient withProviderProviderAnalyticsDashboardsImpl( + ProviderProviderAnalyticsDashboardsService providerProviderAnalyticsDashboards) { + return this.withProviderProviderAnalyticsDashboardsAPI( + new ProviderProviderAnalyticsDashboardsAPI(providerProviderAnalyticsDashboards)); + } + + /** Replace the default ProviderProviderAnalyticsDashboardsAPI with a custom implementation. */ + public WorkspaceClient withProviderProviderAnalyticsDashboardsAPI( + ProviderProviderAnalyticsDashboardsAPI providerProviderAnalyticsDashboards) { + this.providerProviderAnalyticsDashboardsAPI = providerProviderAnalyticsDashboards; + return this; + } + + /** Replace the default ProviderProvidersService with a custom implementation. */ + public WorkspaceClient withProviderProvidersImpl(ProviderProvidersService providerProviders) { + return this.withProviderProvidersAPI(new ProviderProvidersAPI(providerProviders)); + } + + /** Replace the default ProviderProvidersAPI with a custom implementation. */ + public WorkspaceClient withProviderProvidersAPI(ProviderProvidersAPI providerProviders) { + this.providerProvidersAPI = providerProviders; + return this; + } + /** Replace the default ProvidersService with a custom implementation. 
*/ public WorkspaceClient withProvidersImpl(ProvidersService providers) { return this.withProvidersAPI(new ProvidersAPI(providers)); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ApiClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ApiClient.java index 95072a4b8..6660ce571 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ApiClient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ApiClient.java @@ -4,6 +4,9 @@ import com.databricks.sdk.core.http.HttpClient; import com.databricks.sdk.core.http.Request; import com.databricks.sdk.core.http.Response; +import com.databricks.sdk.core.retry.RequestBasedRetryStrategyPicker; +import com.databricks.sdk.core.retry.RetryStrategy; +import com.databricks.sdk.core.retry.RetryStrategyPicker; import com.databricks.sdk.core.utils.SerDeUtils; import com.databricks.sdk.core.utils.SystemTimer; import com.databricks.sdk.core.utils.Timer; @@ -14,6 +17,8 @@ import java.io.IOException; import java.io.InputStream; import java.lang.reflect.Field; +import java.time.ZonedDateTime; +import java.time.format.DateTimeFormatter; import java.util.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -35,7 +40,9 @@ public class ApiClient { private final HttpClient httpClient; private final BodyLogger bodyLogger; + private final RetryStrategyPicker retryStrategyPicker; private final Timer timer; + private static final String RETRY_AFTER_HEADER = "retry-after"; public ApiClient() { this(ConfigLoader.getDefault()); @@ -63,11 +70,12 @@ public ApiClient(DatabricksConfig config, Timer timer) { debugTruncateBytes = 96; } - maxAttempts = 3; + maxAttempts = 4; mapper = SerDeUtils.createMapper(); random = new Random(); httpClient = config.getHttpClient(); bodyLogger = new BodyLogger(mapper, 1024, debugTruncateBytes); + retryStrategyPicker = new RequestBasedRetryStrategyPicker(this.config); this.timer = timer; } @@ -142,6 +150,14 @@ public O GET(String path, I in, 
Class target, Map head } } + public O POST(String path, Class target, Map headers) { + try { + return execute(prepareRequest("POST", path, null, headers), target); + } catch (IOException e) { + throw new DatabricksException("IO error: " + e.getMessage(), e); + } + } + public O POST(String path, I in, Class target, Map headers) { try { return execute(prepareRequest("POST", path, in, headers), target); @@ -220,6 +236,7 @@ private Response getResponse(Request in) { } private Response executeInner(Request in) { + RetryStrategy retryStrategy = retryStrategyPicker.getRetryStrategy(in); int attemptNumber = 0; while (true) { attemptNumber++; @@ -249,18 +266,15 @@ private Response executeInner(Request in) { LOG.debug("Request {} failed", in, e); } - // The request is not retried under three conditions: - // 1. The request succeeded (err == null, out != null). In this case, the response is - // returned. - // 2. The request failed with a non-retriable error (err != null, out == null). - // 3. The request failed with a retriable error, but the number of attempts exceeds - // maxAttempts. - DatabricksError res = ApiErrors.checkForRetry(out, err); - if (!res.isRetriable()) { - if (res.getErrorCode() == null) { - return out; - } - throw res; + // Check if the request succeeded + if (isRequestSuccessful(out, err)) { + return out; + } + // The request did not succeed. + // Check if the request cannot be retried: if yes, retry after backoff, else throw the error. + DatabricksError databricksError = ApiErrors.getDatabricksError(out, err); + if (!retryStrategy.isRetriable(databricksError)) { + throw databricksError; } if (attemptNumber == maxAttempts) { throw new DatabricksException( @@ -268,24 +282,60 @@ private Response executeInner(Request in) { } // Retry after a backoff. 
- int sleepMillis = getBackoffMillis(attemptNumber); + long sleepMillis = getBackoffMillis(out, attemptNumber); LOG.debug(String.format("Retry %s in %dms", in.getRequestLine(), sleepMillis)); try { - timer.wait(sleepMillis); + timer.sleep(sleepMillis); } catch (InterruptedException ex) { Thread.currentThread().interrupt(); } } } - private int getBackoffMillis(int attemptNumber) { - int maxWait = 10000; + private boolean isRequestSuccessful(Response response, Exception e) { + return e == null && response.getStatusCode() >= 200 && response.getStatusCode() < 300; + } + + public long getBackoffMillis(Response response, int attemptNumber) { + Optional backoffMillisInResponse = getBackoffFromRetryAfterHeader(response); + if (backoffMillisInResponse.isPresent()) { + return backoffMillisInResponse.get(); + } + int minWait = 1000; // 1 second + int maxWait = 60000; // 1 minute int minJitter = 50; int maxJitter = 750; - int wait = Math.min(maxWait, attemptNumber * 1000); - wait += random.nextInt(maxJitter - minJitter + 1) + minJitter; - return wait; + int wait = Math.min(maxWait, minWait * (1 << (attemptNumber - 1))); + int jitter = random.nextInt(maxJitter - minJitter + 1) + minJitter; + return wait + jitter; + } + + public static Optional getBackoffFromRetryAfterHeader(Response response) { + if (response == null) return Optional.empty(); + List retryAfterHeader = response.getHeaders(RETRY_AFTER_HEADER); + if (retryAfterHeader == null) { + return Optional.empty(); + } + long waitTime = 0; + for (String retryAfter : retryAfterHeader) { + try { + // Datetime in header is always in GMT + ZonedDateTime retryAfterDate = + ZonedDateTime.parse(retryAfter, DateTimeFormatter.RFC_1123_DATE_TIME); + ZonedDateTime now = ZonedDateTime.now(); + waitTime = java.time.Duration.between(now, retryAfterDate).getSeconds(); + } catch (Exception e) { + // If not a date, assume it is seconds + try { + waitTime = Long.parseLong(retryAfter); + } catch (NumberFormatException nfe) { + // Just fallback 
to using exponential backoff + return Optional.empty(); + } + } + } + return Optional.of(waitTime * 1000); } private String makeLogRecord(Request in, Response out) { @@ -349,19 +399,24 @@ private void fillInHeaders(T target, Response response) { if (firstHeader == null) { continue; } - try { - field.setAccessible(true); - if (field.getType() == String.class) { - field.set(target, firstHeader); - } else if (field.getType() == Long.class) { - field.set(target, Long.parseLong(firstHeader)); - } else { - LOG.warn("Unsupported header type: " + field.getType()); + // Synchronize on field across all methods which alter its accessibility to ensure + // multi threaded access of these objects (e.g. in the example of concurrent creation of + // workspace clients or config resolution) are safe + synchronized (field) { + try { + field.setAccessible(true); + if (field.getType() == String.class) { + field.set(target, firstHeader); + } else if (field.getType() == Long.class) { + field.set(target, Long.parseLong(firstHeader)); + } else { + LOG.warn("Unsupported header type: " + field.getType()); + } + } catch (IllegalAccessException e) { + throw new DatabricksException("Failed to unmarshal headers: " + e.getMessage(), e); + } finally { + field.setAccessible(false); } - } catch (IllegalAccessException e) { - throw new DatabricksException("Failed to unmarshal headers: " + e.getMessage(), e); - } finally { - field.setAccessible(false); } } } @@ -380,13 +435,18 @@ public void deserialize(Response response, T object) throws IOException { Optional contentsField = getContentsField(object); if (contentsField.isPresent()) { Field field = contentsField.get(); - try { - field.setAccessible(true); - field.set(object, response.getBody()); - } catch (IllegalAccessException e) { - throw new DatabricksException("Failed to unmarshal headers: " + e.getMessage(), e); - } finally { - field.setAccessible(false); + // Synchronize on field across all methods which alter its accessibility to ensure + // 
multi threaded access of these objects (e.g. in the example of concurrent creation of + // workspace clients or config resolution) are safe + synchronized (field) { + try { + field.setAccessible(true); + field.set(object, response.getBody()); + } catch (IllegalAccessException e) { + throw new DatabricksException("Failed to unmarshal headers: " + e.getMessage(), e); + } finally { + field.setAccessible(false); + } } } else if (response.getBody() != null) { mapper.readerForUpdating(object).readValue(response.getBody()); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ConfigAttributeAccessor.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ConfigAttributeAccessor.java index f67bd15e7..73cb3cba2 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ConfigAttributeAccessor.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ConfigAttributeAccessor.java @@ -36,22 +36,33 @@ public String getEnv(Map getEnv) { } public void setValueOnConfig(DatabricksConfig cfg, String value) throws IllegalAccessException { - field.setAccessible(true); - if (field.getType() == String.class) { - field.set(cfg, value); - } else if (field.getType() == int.class) { - field.set(cfg, Integer.parseInt(value)); - } else if (field.getType() == boolean.class) { - field.set(cfg, Boolean.parseBoolean(value)); + // Synchronize on field across all methods which alter its accessibility to ensure + // multi threaded access of these objects (e.g. 
in the example of concurrent creation of + // workspace clients or config resolution) are safe + synchronized (field) { + field.setAccessible(true); + if (field.getType() == String.class) { + field.set(cfg, value); + } else if (field.getType() == int.class) { + field.set(cfg, Integer.parseInt(value)); + } else if (field.getType() == boolean.class) { + field.set(cfg, Boolean.parseBoolean(value)); + } else if (field.getType() == ProxyConfig.ProxyAuthType.class) { + if (value != null) { + field.set(cfg, ProxyConfig.ProxyAuthType.valueOf(value)); + } + } + field.setAccessible(false); } - field.setAccessible(false); } public Object getValueFromConfig(DatabricksConfig cfg) throws IllegalAccessException { - field.setAccessible(true); - Object value = field.get(cfg); - field.setAccessible(false); - return value; + synchronized (field) { + field.setAccessible(true); + Object value = field.get(cfg); + field.setAccessible(false); + return value; + } } public String getAuthType() { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java index 35c7e2d35..1c4f06389 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksConfig.java @@ -114,6 +114,24 @@ public class DatabricksConfig { @ConfigAttribute(env = "DATABRICKS_RATE_LIMIT") private Integer rateLimit; + @ConfigAttribute(env = "PROXY_HOST") + private String proxyHost; + + @ConfigAttribute(env = "PROXY_PORT") + private Integer proxyPort; + + @ConfigAttribute(env = "PROXY_USERNAME") + private String proxyUsername; + + @ConfigAttribute(env = "PROXY_PASSWORD") + private String proxyPassword; + + @ConfigAttribute(env = "PROXY_AUTH_TYPE") + private ProxyConfig.ProxyAuthType proxyAuthType; + + @ConfigAttribute(env = "USE_SYSTEM_PROPERTIES_HTTP") + private Boolean useSystemPropertiesHttp; + private 
volatile boolean resolved; private HeaderFactory headerFactory; @@ -156,12 +174,8 @@ private void initHttp() { if (httpClient != null) { return; } - int timeout = 300; - if (httpTimeoutSeconds != null) { - timeout = httpTimeoutSeconds; - } // eventually it'll get decoupled from config. - httpClient = new CommonsHttpClient(timeout); + httpClient = new CommonsHttpClient(this); } public synchronized Map authenticate() throws DatabricksException { @@ -462,6 +476,60 @@ public DatabricksConfig setHttpClient(HttpClient httpClient) { return this; } + public String getProxyHost() { + return proxyHost; + } + + public DatabricksConfig setProxyHost(String proxyHost) { + this.proxyHost = proxyHost; + return this; + } + + public Integer getProxyPort() { + return proxyPort; + } + + public DatabricksConfig setProxyPort(Integer proxyPort) { + this.proxyPort = proxyPort; + return this; + } + + public String getProxyUsername() { + return proxyUsername; + } + + public DatabricksConfig setProxyUsername(String proxyUsername) { + this.proxyUsername = proxyUsername; + return this; + } + + public String getProxyPassword() { + return proxyPassword; + } + + public DatabricksConfig setProxyPassword(String proxyPassword) { + this.proxyPassword = proxyPassword; + return this; + } + + public ProxyConfig.ProxyAuthType getProxyAuthType() { + return proxyAuthType; + } + + public DatabricksConfig setProxyAuthType(ProxyConfig.ProxyAuthType proxyAuthType) { + this.proxyAuthType = proxyAuthType; + return this; + } + + public Boolean getUseSystemPropertiesHttp() { + return useSystemPropertiesHttp; + } + + public DatabricksConfig setUseSystemPropertiesHttp(Boolean useSystemPropertiesHttp) { + this.useSystemPropertiesHttp = useSystemPropertiesHttp; + return this; + } + public boolean isAzure() { return this.getDatabricksEnvironment().getCloud() == Cloud.AZURE; } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksError.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksError.java index 6ad5aeafc..6538b6719 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksError.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/DatabricksError.java @@ -1,15 +1,9 @@ package com.databricks.sdk.core; import com.databricks.sdk.core.error.ErrorDetail; -import java.net.ConnectException; -import java.net.SocketException; -import java.net.SocketTimeoutException; -import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.stream.Collectors; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; /** * The result of checking whether {@code ApiClient} should retry a request. @@ -21,24 +15,6 @@ */ public class DatabricksError extends DatabricksException { private static final String ERROR_INFO_TYPE = "type.googleapis.com/google.rpc.ErrorInfo"; - private final Logger LOG = LoggerFactory.getLogger(getClass().getName()); - - /** Errors returned by Databricks services which are known to be retriable. */ - private static final List TRANSIENT_ERROR_STRING_MATCHES = - Arrays.asList( - "com.databricks.backend.manager.util.UnknownWorkerEnvironmentException", - "does not have any associated worker environments", - "There is no worker environment with id", - "Unknown worker environment", - "ClusterNotReadyException"); - - /** - * Exception classes thrown by Java and Java libraries in which case the request should be - * retried. 
- */ - private static final List> RETRYABLE_CLASSES = - Arrays.asList(SocketException.class, SocketTimeoutException.class, ConnectException.class); - private final String message; private final Throwable cause; private final String errorCode; @@ -89,48 +65,15 @@ public String getErrorCode() { return errorCode; } - int getStatusCode() { + public int getStatusCode() { return statusCode; } - public boolean isMissing() { - return statusCode == 404; - } - - public boolean isTooManyRequests() { - return statusCode == 429; - } - - public boolean isRetriable() { - if (isTooManyRequests()) { - return true; - } - for (String substring : TRANSIENT_ERROR_STRING_MATCHES) { - if (message != null && message.contains(substring)) { - LOG.debug("Attempting retry because of {}", substring); - return true; - } - } - for (Class clazz : RETRYABLE_CLASSES) { - if (isCausedBy(cause, clazz)) { - LOG.debug("Attempting retry because cause or nested cause extends {}", clazz.getName()); - return true; - } - } - return false; + public Throwable getCause() { + return cause; } List getDetailsByType(String type) { return this.details.stream().filter(e -> e.getType().equals(type)).collect(Collectors.toList()); } - - private static boolean isCausedBy(Throwable throwable, Class clazz) { - if (throwable == null) { - return false; - } - if (clazz.isInstance(throwable)) { - return true; - } - return isCausedBy(throwable.getCause(), clazz); - } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ProxyConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ProxyConfig.java new file mode 100644 index 000000000..c06985eba --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/ProxyConfig.java @@ -0,0 +1,81 @@ +package com.databricks.sdk.core; + +public class ProxyConfig { + private String host; + private Integer port; + private String username; + private String password; + private ProxyAuthType proxyAuthType; + private Boolean useSystemProperties; 
+ + public enum ProxyAuthType { + // Currently we only support BASIC and SPNEGO + NONE, + BASIC, + // We only support kerberos for negotiate + SPNEGO + } + + public ProxyConfig(DatabricksConfig config) { + this.host = config.getProxyHost(); + this.port = config.getProxyPort(); + this.username = config.getProxyUsername(); + this.password = config.getProxyPassword(); + this.proxyAuthType = config.getProxyAuthType(); + this.useSystemProperties = config.getUseSystemPropertiesHttp(); + } + + public String getHost() { + return host; + } + + public ProxyConfig setHost(String host) { + this.host = host; + return this; + } + + public Integer getPort() { + return port; + } + + public ProxyConfig setPort(Integer port) { + this.port = port; + return this; + } + + public String getUsername() { + return username; + } + + public ProxyConfig setUsername(String username) { + this.username = username; + return this; + } + + public String getPassword() { + return password; + } + + public ProxyConfig setPassword(String password) { + this.password = password; + return this; + } + + public ProxyAuthType getProxyAuthType() { + return proxyAuthType; + } + + public ProxyConfig setProxyAuthType(ProxyAuthType proxyAuthType) { + this.proxyAuthType = proxyAuthType; + return this; + } + + public Boolean getUseSystemProperties() { + return useSystemProperties; + } + + public ProxyConfig setUseSystemProperties(Boolean useSystemProperties) { + this.useSystemProperties = useSystemProperties; + return this; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/UserAgent.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/UserAgent.java index c7d7d23b7..176051091 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/UserAgent.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/UserAgent.java @@ -13,7 +13,7 @@ public class UserAgent { // TODO: check if reading from // /META-INF/maven/com.databricks/databrics-sdk-java/pom.properties 
// or getClass().getPackage().getImplementationVersion() is enough. - private static final String version = "0.19.0"; + private static final String version = "0.23.0"; public static void withProduct(String product, String productVersion) { UserAgent.product = product; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/commons/CommonsHttpClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/commons/CommonsHttpClient.java index b84a09aa4..c834e2c50 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/commons/CommonsHttpClient.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/commons/CommonsHttpClient.java @@ -2,11 +2,14 @@ import static org.apache.http.entity.ContentType.APPLICATION_JSON; +import com.databricks.sdk.core.DatabricksConfig; import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.ProxyConfig; import com.databricks.sdk.core.http.HttpClient; import com.databricks.sdk.core.http.Request; import com.databricks.sdk.core.http.Response; import com.databricks.sdk.core.utils.CustomCloseInputStream; +import com.databricks.sdk.core.utils.ProxyUtils; import java.io.IOException; import java.io.InputStream; import java.nio.charset.StandardCharsets; @@ -41,6 +44,20 @@ public CommonsHttpClient(int timeoutSeconds) { hc = makeClosableHttpClient(); } + public CommonsHttpClient(DatabricksConfig databricksConfig) { + this( + databricksConfig.getHttpTimeoutSeconds() == null + ? 
300 + : databricksConfig.getHttpTimeoutSeconds(), + new ProxyConfig(databricksConfig)); + } + + public CommonsHttpClient(int timeoutSeconds, ProxyConfig proxyConfig) { + timeout = timeoutSeconds * 1000; + connectionManager.setMaxTotal(100); + hc = makeClosableHttpClient(proxyConfig); + } + private RequestConfig makeRequestConfig() { return RequestConfig.custom() .setConnectionRequestTimeout(timeout) @@ -56,6 +73,15 @@ private CloseableHttpClient makeClosableHttpClient() { .build(); } + private CloseableHttpClient makeClosableHttpClient(ProxyConfig proxyConfig) { + HttpClientBuilder builder = + HttpClientBuilder.create() + .setConnectionManager(connectionManager) + .setDefaultRequestConfig(makeRequestConfig()); + ProxyUtils.setupProxy(proxyConfig, builder); + return builder.build(); + } + @Override public Response execute(Request in) throws IOException { HttpUriRequest request = transformRequest(in); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/AbstractErrorMapper.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/AbstractErrorMapper.java new file mode 100644 index 000000000..4142281fe --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/AbstractErrorMapper.java @@ -0,0 +1,58 @@ +package com.databricks.sdk.core.error; + +import com.databricks.sdk.core.DatabricksError; +import com.databricks.sdk.core.http.Response; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +abstract class AbstractErrorMapper { + private static final Logger LOG = LoggerFactory.getLogger(AbstractErrorMapper.class); + + @FunctionalInterface + protected interface ErrorCodeRule { + DatabricksError create(String message, List details); + } + + @FunctionalInterface + protected interface StatusCodeRule { + DatabricksError create(String errorCode, String message, List details); + } + + public DatabricksError apply(Response resp, 
ApiErrorBody errorBody) { + for (ErrorOverride override : ErrorOverrides.ALL_OVERRIDES) { + if (override.matches(errorBody, resp)) { + LOG.debug( + "Overriding error with {} (original status code: {}, original error code: {})", + override.getDebugName(), + resp.getStatusCode(), + errorBody.getErrorCode()); + return override.makeError(errorBody); + } + } + int code = resp.getStatusCode(); + String message = errorBody.getMessage(); + String errorCode = errorBody.getErrorCode(); + List details = errorBody.getErrorDetails(); + if (errorCodeMapping.containsKey(errorCode)) { + return errorCodeMapping.get(errorCode).create(message, details); + } + if (statusCodeMapping.containsKey(code)) { + return statusCodeMapping.get(code).create(errorCode, message, details); + } + return new DatabricksError(errorCode, message, code, details); + } + + private final Map statusCodeMapping = new HashMap<>(); + private final Map errorCodeMapping = new HashMap<>(); + + protected void statusCode(int code, StatusCodeRule rule) { + statusCodeMapping.put(code, rule); + } + + protected void errorCode(String errorCode, ErrorCodeRule rule) { + errorCodeMapping.put(errorCode, rule); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/ApiErrors.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/ApiErrors.java index c284fbfcf..dd2962b68 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/ApiErrors.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/ApiErrors.java @@ -15,8 +15,9 @@ public class ApiErrors { private static final ObjectMapper MAPPER = new ObjectMapper(); private static final Pattern HTML_ERROR_REGEX = Pattern.compile("

(.*)
"); + private static final ErrorMapper ERROR_MAPPER = new ErrorMapper(); - public static DatabricksError checkForRetry(Response out, Exception error) { + public static DatabricksError getDatabricksError(Response out, Exception error) { if (error != null) { // If the endpoint did not respond to the request, interpret the exception. return new DatabricksError("IO_ERROR", 523, error); @@ -51,11 +52,7 @@ private static DatabricksError readErrorFromResponse(Response response) { if (errorBody.getErrorDetails() == null) { errorBody.setErrorDetails(Collections.emptyList()); } - return new DatabricksError( - errorBody.getErrorCode(), - errorBody.getMessage(), - response.getStatusCode(), - errorBody.getErrorDetails()); + return ERROR_MAPPER.apply(response, errorBody); } /** diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/ErrorMapper.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/ErrorMapper.java new file mode 100644 index 000000000..1f38ceb1b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/ErrorMapper.java @@ -0,0 +1,39 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.core.error; + +import com.databricks.sdk.support.Generated; + +@Generated +class ErrorMapper extends AbstractErrorMapper { + public ErrorMapper() { + statusCode(400, com.databricks.sdk.core.error.platform.BadRequest::new); + statusCode(401, com.databricks.sdk.core.error.platform.Unauthenticated::new); + statusCode(403, com.databricks.sdk.core.error.platform.PermissionDenied::new); + statusCode(404, com.databricks.sdk.core.error.platform.NotFound::new); + statusCode(409, com.databricks.sdk.core.error.platform.ResourceConflict::new); + statusCode(429, com.databricks.sdk.core.error.platform.TooManyRequests::new); + statusCode(499, com.databricks.sdk.core.error.platform.Cancelled::new); + statusCode(500, com.databricks.sdk.core.error.platform.InternalError::new); + statusCode(501, com.databricks.sdk.core.error.platform.NotImplemented::new); + statusCode(503, com.databricks.sdk.core.error.platform.TemporarilyUnavailable::new); + statusCode(504, com.databricks.sdk.core.error.platform.DeadlineExceeded::new); + + errorCode( + "INVALID_PARAMETER_VALUE", + com.databricks.sdk.core.error.platform.InvalidParameterValue::new); + errorCode( + "RESOURCE_DOES_NOT_EXIST", + com.databricks.sdk.core.error.platform.ResourceDoesNotExist::new); + errorCode("ABORTED", com.databricks.sdk.core.error.platform.Aborted::new); + errorCode("ALREADY_EXISTS", com.databricks.sdk.core.error.platform.AlreadyExists::new); + errorCode( + "RESOURCE_ALREADY_EXISTS", + com.databricks.sdk.core.error.platform.ResourceAlreadyExists::new); + errorCode("RESOURCE_EXHAUSTED", com.databricks.sdk.core.error.platform.ResourceExhausted::new); + errorCode( + "REQUEST_LIMIT_EXCEEDED", com.databricks.sdk.core.error.platform.RequestLimitExceeded::new); + errorCode("UNKNOWN", com.databricks.sdk.core.error.platform.Unknown::new); + errorCode("DATA_LOSS", com.databricks.sdk.core.error.platform.DataLoss::new); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/ErrorOverride.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/ErrorOverride.java new file mode 100644 index 000000000..cb35d0544 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/ErrorOverride.java @@ -0,0 +1,90 @@ +package com.databricks.sdk.core.error; + +import com.databricks.sdk.core.DatabricksError; +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.http.Response; +import java.lang.reflect.Constructor; +import java.util.List; +import java.util.regex.Pattern; + +public class ErrorOverride { + private final String debugName; + private final Pattern pathRegex; + private final String verb; + private final Pattern statusCodeMatcher; + private final Pattern errorCodeMatcher; + private final Pattern messageMatcher; + private final Class customError; + + public ErrorOverride( + String debugName, + String pathRegex, + String verb, + String statusCodeMatcher, + String errorCodeMatcher, + String messageMatcher, + Class customError) { + this.debugName = debugName; + this.pathRegex = ErrorOverride.compilePattern(pathRegex); + this.verb = verb; + this.statusCodeMatcher = ErrorOverride.compilePattern(statusCodeMatcher); + this.errorCodeMatcher = ErrorOverride.compilePattern(errorCodeMatcher); + this.messageMatcher = ErrorOverride.compilePattern(messageMatcher); + this.customError = customError; + } + + public boolean matches(ApiErrorBody body, Response resp) { + if (!resp.getRequest().getMethod().equals(this.verb)) { + return false; + } + + if (this.pathRegex != null + && !this.pathRegex.matcher(resp.getRequest().getUri().getPath()).matches()) { + return false; + } + String statusCode = Integer.toString(resp.getStatusCode()); + if (this.statusCodeMatcher != null && !this.statusCodeMatcher.matcher(statusCode).matches()) { + return false; + } + if (this.errorCodeMatcher != null + && 
!this.errorCodeMatcher.matcher(body.getErrorCode()).matches()) { + return false; + } + // Allow matching substring of the error message. + if (this.messageMatcher != null && !this.messageMatcher.matcher(body.getMessage()).find()) { + return false; + } + return true; + } + + public String getDebugName() { + return this.debugName; + } + + public T makeError(ApiErrorBody body) { + Constructor[] constructors = this.customError.getConstructors(); + for (Constructor constructor : constructors) { + Class[] parameterTypes = constructor.getParameterTypes(); + // All errors have a 2-argument constructor for the message and the error body. + if (parameterTypes.length == 2 + && parameterTypes[0].equals(String.class) + && parameterTypes[1].equals(List.class)) { + try { + return (T) constructor.newInstance(body.getMessage(), body.getErrorDetails()); + } catch (Exception e) { + throw new DatabricksException( + "Error creating custom error for error type " + this.customError.getName(), e); + } + } + } + throw new DatabricksException( + "No suitable constructor found for error type " + this.customError.getName()); + } + + private static Pattern compilePattern(String pattern) { + if (pattern == null || pattern.isEmpty()) { + return null; + } + return Pattern.compile(pattern); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/ErrorOverrides.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/ErrorOverrides.java new file mode 100755 index 000000000..e269a6d99 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/ErrorOverrides.java @@ -0,0 +1,29 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.core.error; + +import com.databricks.sdk.support.Generated; +import java.util.Arrays; +import java.util.List; + +@Generated +class ErrorOverrides { + static final List> ALL_OVERRIDES = + Arrays.asList( + new ErrorOverride<>( + "Clusters InvalidParameterValue=>ResourceDoesNotExist", + "^/api/2\\.\\d/clusters/get", + "GET", + "^400$", + "INVALID_PARAMETER_VALUE", + "Cluster .* does not exist", + com.databricks.sdk.core.error.platform.ResourceDoesNotExist.class), + new ErrorOverride<>( + "Jobs InvalidParameterValue=>ResourceDoesNotExist", + "^/api/2\\.\\d/jobs/get", + "GET", + "^400$", + "INVALID_PARAMETER_VALUE", + "Job .* does not exist", + com.databricks.sdk.core.error.platform.ResourceDoesNotExist.class)); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/Aborted.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/Aborted.java new file mode 100755 index 000000000..bac89a9ec --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/Aborted.java @@ -0,0 +1,17 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.core.error.platform; + +import com.databricks.sdk.core.error.ErrorDetail; +import com.databricks.sdk.support.Generated; +import java.util.List; + +/** + * the operation was aborted, typically due to a concurrency issue such as a sequencer check failure + */ +@Generated +public class Aborted extends ResourceConflict { + public Aborted(String message, List details) { + super("ABORTED", message, details); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/AlreadyExists.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/AlreadyExists.java new file mode 100755 index 000000000..6396f5a92 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/AlreadyExists.java @@ -0,0 +1,15 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.core.error.platform; + +import com.databricks.sdk.core.error.ErrorDetail; +import com.databricks.sdk.support.Generated; +import java.util.List; + +/** operation was rejected due a conflict with an existing resource */ +@Generated +public class AlreadyExists extends ResourceConflict { + public AlreadyExists(String message, List details) { + super("ALREADY_EXISTS", message, details); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/BadRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/BadRequest.java new file mode 100755 index 000000000..12e2dd0e8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/BadRequest.java @@ -0,0 +1,20 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.core.error.platform; + +import com.databricks.sdk.core.DatabricksError; +import com.databricks.sdk.core.error.ErrorDetail; +import com.databricks.sdk.support.Generated; +import java.util.List; + +/** the request is invalid */ +@Generated +public class BadRequest extends DatabricksError { + public BadRequest(String message, List details) { + super("BAD_REQUEST", message, 400, details); + } + + public BadRequest(String errorCode, String message, List details) { + super(errorCode, message, 400, details); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/Cancelled.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/Cancelled.java new file mode 100755 index 000000000..03850d441 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/Cancelled.java @@ -0,0 +1,20 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.core.error.platform; + +import com.databricks.sdk.core.DatabricksError; +import com.databricks.sdk.core.error.ErrorDetail; +import com.databricks.sdk.support.Generated; +import java.util.List; + +/** the operation was explicitly canceled by the caller */ +@Generated +public class Cancelled extends DatabricksError { + public Cancelled(String message, List details) { + super("CANCELLED", message, 499, details); + } + + public Cancelled(String errorCode, String message, List details) { + super(errorCode, message, 499, details); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/DataLoss.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/DataLoss.java new file mode 100755 index 000000000..61b39e36f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/DataLoss.java @@ -0,0 +1,15 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.core.error.platform; + +import com.databricks.sdk.core.error.ErrorDetail; +import com.databricks.sdk.support.Generated; +import java.util.List; + +/** unrecoverable data loss or corruption */ +@Generated +public class DataLoss extends InternalError { + public DataLoss(String message, List details) { + super("DATA_LOSS", message, details); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/DeadlineExceeded.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/DeadlineExceeded.java new file mode 100755 index 000000000..79c011643 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/DeadlineExceeded.java @@ -0,0 +1,20 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.core.error.platform; + +import com.databricks.sdk.core.DatabricksError; +import com.databricks.sdk.core.error.ErrorDetail; +import com.databricks.sdk.support.Generated; +import java.util.List; + +/** the deadline expired before the operation could complete */ +@Generated +public class DeadlineExceeded extends DatabricksError { + public DeadlineExceeded(String message, List details) { + super("DEADLINE_EXCEEDED", message, 504, details); + } + + public DeadlineExceeded(String errorCode, String message, List details) { + super(errorCode, message, 504, details); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/InternalError.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/InternalError.java new file mode 100755 index 000000000..db70ccb69 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/InternalError.java @@ -0,0 +1,20 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.core.error.platform; + +import com.databricks.sdk.core.DatabricksError; +import com.databricks.sdk.core.error.ErrorDetail; +import com.databricks.sdk.support.Generated; +import java.util.List; + +/** some invariants expected by the underlying system have been broken */ +@Generated +public class InternalError extends DatabricksError { + public InternalError(String message, List details) { + super("INTERNAL_ERROR", message, 500, details); + } + + public InternalError(String errorCode, String message, List details) { + super(errorCode, message, 500, details); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/InvalidParameterValue.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/InvalidParameterValue.java new file mode 100755 index 000000000..4f4fb3757 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/InvalidParameterValue.java @@ -0,0 +1,15 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.core.error.platform; + +import com.databricks.sdk.core.error.ErrorDetail; +import com.databricks.sdk.support.Generated; +import java.util.List; + +/** supplied value for a parameter was invalid */ +@Generated +public class InvalidParameterValue extends BadRequest { + public InvalidParameterValue(String message, List details) { + super("INVALID_PARAMETER_VALUE", message, details); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/NotFound.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/NotFound.java new file mode 100755 index 000000000..30939c6d9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/NotFound.java @@ -0,0 +1,20 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.core.error.platform; + +import com.databricks.sdk.core.DatabricksError; +import com.databricks.sdk.core.error.ErrorDetail; +import com.databricks.sdk.support.Generated; +import java.util.List; + +/** the operation was performed on a resource that does not exist */ +@Generated +public class NotFound extends DatabricksError { + public NotFound(String message, List details) { + super("NOT_FOUND", message, 404, details); + } + + public NotFound(String errorCode, String message, List details) { + super(errorCode, message, 404, details); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/NotImplemented.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/NotImplemented.java new file mode 100755 index 000000000..4761ee9ef --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/NotImplemented.java @@ -0,0 +1,20 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.core.error.platform; + +import com.databricks.sdk.core.DatabricksError; +import com.databricks.sdk.core.error.ErrorDetail; +import com.databricks.sdk.support.Generated; +import java.util.List; + +/** the operation is not implemented or is not supported/enabled in this service */ +@Generated +public class NotImplemented extends DatabricksError { + public NotImplemented(String message, List details) { + super("NOT_IMPLEMENTED", message, 501, details); + } + + public NotImplemented(String errorCode, String message, List details) { + super(errorCode, message, 501, details); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/PermissionDenied.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/PermissionDenied.java new file mode 100755 index 000000000..96dddf2f7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/PermissionDenied.java @@ -0,0 +1,20 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.core.error.platform; + +import com.databricks.sdk.core.DatabricksError; +import com.databricks.sdk.core.error.ErrorDetail; +import com.databricks.sdk.support.Generated; +import java.util.List; + +/** the caller does not have permission to execute the specified operation */ +@Generated +public class PermissionDenied extends DatabricksError { + public PermissionDenied(String message, List details) { + super("PERMISSION_DENIED", message, 403, details); + } + + public PermissionDenied(String errorCode, String message, List details) { + super(errorCode, message, 403, details); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/RequestLimitExceeded.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/RequestLimitExceeded.java new file mode 100755 index 000000000..4ac0bd2b0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/RequestLimitExceeded.java @@ -0,0 +1,15 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.core.error.platform; + +import com.databricks.sdk.core.error.ErrorDetail; +import com.databricks.sdk.support.Generated; +import java.util.List; + +/** cluster request was rejected because it would exceed a resource limit */ +@Generated +public class RequestLimitExceeded extends TooManyRequests { + public RequestLimitExceeded(String message, List details) { + super("REQUEST_LIMIT_EXCEEDED", message, details); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/ResourceAlreadyExists.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/ResourceAlreadyExists.java new file mode 100755 index 000000000..5ca274901 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/ResourceAlreadyExists.java @@ -0,0 +1,15 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.core.error.platform; + +import com.databricks.sdk.core.error.ErrorDetail; +import com.databricks.sdk.support.Generated; +import java.util.List; + +/** operation was rejected due a conflict with an existing resource */ +@Generated +public class ResourceAlreadyExists extends ResourceConflict { + public ResourceAlreadyExists(String message, List details) { + super("RESOURCE_ALREADY_EXISTS", message, details); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/ResourceConflict.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/ResourceConflict.java new file mode 100755 index 000000000..8530091a8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/ResourceConflict.java @@ -0,0 +1,20 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.core.error.platform; + +import com.databricks.sdk.core.DatabricksError; +import com.databricks.sdk.core.error.ErrorDetail; +import com.databricks.sdk.support.Generated; +import java.util.List; + +/** maps to all HTTP 409 (Conflict) responses */ +@Generated +public class ResourceConflict extends DatabricksError { + public ResourceConflict(String message, List details) { + super("RESOURCE_CONFLICT", message, 409, details); + } + + public ResourceConflict(String errorCode, String message, List details) { + super(errorCode, message, 409, details); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/ResourceDoesNotExist.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/ResourceDoesNotExist.java new file mode 100755 index 000000000..23952007f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/ResourceDoesNotExist.java @@ -0,0 +1,15 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.core.error.platform; + +import com.databricks.sdk.core.error.ErrorDetail; +import com.databricks.sdk.support.Generated; +import java.util.List; + +/** operation was performed on a resource that does not exist */ +@Generated +public class ResourceDoesNotExist extends NotFound { + public ResourceDoesNotExist(String message, List details) { + super("RESOURCE_DOES_NOT_EXIST", message, details); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/ResourceExhausted.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/ResourceExhausted.java new file mode 100755 index 000000000..4652c9a3d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/ResourceExhausted.java @@ -0,0 +1,15 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.core.error.platform; + +import com.databricks.sdk.core.error.ErrorDetail; +import com.databricks.sdk.support.Generated; +import java.util.List; + +/** operation is rejected due to per-user rate limiting */ +@Generated +public class ResourceExhausted extends TooManyRequests { + public ResourceExhausted(String message, List details) { + super("RESOURCE_EXHAUSTED", message, details); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/TemporarilyUnavailable.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/TemporarilyUnavailable.java new file mode 100755 index 000000000..111b909cc --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/TemporarilyUnavailable.java @@ -0,0 +1,20 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.core.error.platform; + +import com.databricks.sdk.core.DatabricksError; +import com.databricks.sdk.core.error.ErrorDetail; +import com.databricks.sdk.support.Generated; +import java.util.List; + +/** the service is currently unavailable */ +@Generated +public class TemporarilyUnavailable extends DatabricksError { + public TemporarilyUnavailable(String message, List details) { + super("TEMPORARILY_UNAVAILABLE", message, 503, details); + } + + public TemporarilyUnavailable(String errorCode, String message, List details) { + super(errorCode, message, 503, details); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/TooManyRequests.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/TooManyRequests.java new file mode 100755 index 000000000..d8b7bf611 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/TooManyRequests.java @@ -0,0 +1,20 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.core.error.platform; + +import com.databricks.sdk.core.DatabricksError; +import com.databricks.sdk.core.error.ErrorDetail; +import com.databricks.sdk.support.Generated; +import java.util.List; + +/** maps to HTTP code: 429 Too Many Requests */ +@Generated +public class TooManyRequests extends DatabricksError { + public TooManyRequests(String message, List details) { + super("TOO_MANY_REQUESTS", message, 429, details); + } + + public TooManyRequests(String errorCode, String message, List details) { + super(errorCode, message, 429, details); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/Unauthenticated.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/Unauthenticated.java new file mode 100755 index 000000000..5d191b3e6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/Unauthenticated.java @@ -0,0 +1,20 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.core.error.platform; + +import com.databricks.sdk.core.DatabricksError; +import com.databricks.sdk.core.error.ErrorDetail; +import com.databricks.sdk.support.Generated; +import java.util.List; + +/** the request does not have valid authentication (AuthN) credentials for the operation */ +@Generated +public class Unauthenticated extends DatabricksError { + public Unauthenticated(String message, List details) { + super("UNAUTHENTICATED", message, 401, details); + } + + public Unauthenticated(String errorCode, String message, List details) { + super(errorCode, message, 401, details); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/Unknown.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/Unknown.java new file mode 100755 index 000000000..8e5096bed --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/error/platform/Unknown.java @@ -0,0 +1,15 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.core.error.platform; + +import com.databricks.sdk.core.error.ErrorDetail; +import com.databricks.sdk.support.Generated; +import java.util.List; + +/** this error is used as a fallback if the platform-side mapping is missing some reason */ +@Generated +public class Unknown extends InternalError { + public Unknown(String message, List details) { + super("UNKNOWN", message, details); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/http/Encoding.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/http/Encoding.java new file mode 100644 index 000000000..fc2fea902 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/http/Encoding.java @@ -0,0 +1,102 @@ +package com.databricks.sdk.core.http; + +import java.nio.ByteBuffer; +import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; +import java.util.BitSet; + +/** + * Utility class for encoding strings for use in URLs. + * + *

/**
 * Utility class for encoding strings for use in URLs.
 *
 * <p>Adapted from URLEncodingUtils.java from Apache's HttpClient library.
 */
public class Encoding {

  /**
   * Unreserved characters, i.e. alphanumeric, plus: {@code _ - ! . ~ ' ( ) *}
   *
   * <p>This list is the same as the {@code unreserved} list in RFC 2396
   */
  private static final BitSet UNRESERVED = new BitSet(256);

  /**
   * Characters which are safe to use in a path, excluding /, i.e. {@link #UNRESERVED} plus
   * punctuation plus @
   */
  private static final BitSet PATHSAFE = new BitSet(256);

  /** Characters which are safe to use in a path, including /. */
  private static final BitSet PATH_SPECIAL = new BitSet(256);

  /** Hexadecimal radix used when percent-encoding a byte. */
  private static final int RADIX = 16;

  static {
    // Alphanumerics form the core of the RFC 2396 "unreserved" set.
    for (int ch = 'a'; ch <= 'z'; ch++) {
      UNRESERVED.set(ch);
    }
    for (int ch = 'A'; ch <= 'Z'; ch++) {
      UNRESERVED.set(ch);
    }
    for (int ch = '0'; ch <= '9'; ch++) {
      UNRESERVED.set(ch);
    }
    // The RFC 2396 "mark" characters.
    for (char ch : new char[] {'_', '-', '.', '*', '!', '~', '\'', '(', ')'}) {
      UNRESERVED.set(ch);
    }

    // URL path safe: unreserved plus the param separator and RFC 2396 path punctuation.
    PATHSAFE.or(UNRESERVED);
    for (char ch : new char[] {';', ':', '@', '&', '=', '+', '$', ','}) {
      PATHSAFE.set(ch);
    }

    // Path-special additionally allows the segment separator itself.
    PATH_SPECIAL.or(PATHSAFE);
    PATH_SPECIAL.set('/');
  }

  /**
   * Percent-encodes {@code content} using {@code charset}, leaving bytes in {@code safechars}
   * untouched. When {@code blankAsPlus} is set, a space becomes '+' instead of "%20".
   * Returns null when {@code content} is null.
   */
  private static String urlEncode(
      final String content,
      final Charset charset,
      final BitSet safechars,
      final boolean blankAsPlus) {
    if (content == null) {
      return null;
    }
    final StringBuilder encoded = new StringBuilder();
    final ByteBuffer bytes = charset.encode(content);
    while (bytes.hasRemaining()) {
      final int b = bytes.get() & 0xff;
      if (safechars.get(b)) {
        encoded.append((char) b);
      } else if (blankAsPlus && b == ' ') {
        encoded.append('+');
      } else {
        // Escape as '%' followed by two uppercase hex digits.
        encoded
            .append('%')
            .append(Character.toUpperCase(Character.forDigit((b >> 4) & 0xF, RADIX)))
            .append(Character.toUpperCase(Character.forDigit(b & 0xF, RADIX)));
      }
    }
    return encoded.toString();
  }

  /**
   * Encodes a path parameter that may itself span multiple path segments: every byte outside the
   * path-special set is percent-encoded (UTF-8), while {@code /} is left intact.
   */
  public static String encodeMultiSegmentPathParameter(String param) {
    return urlEncode(param, StandardCharsets.UTF_8, PATH_SPECIAL, false);
  }
}
isNonRetriableException(DatabricksError databricksError) { + if (databricksError.getCause() == null) { + return false; + } + return NON_RETRIABLE_EXCEPTIONS.contains(databricksError.getCause().getClass()); + } + + private boolean isNonRetriableHttpCode(DatabricksError databricksError) { + return NON_RETRIABLE_HTTP_CODES.contains(databricksError.getStatusCode()); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/retry/NonIdempotentRequestRetryStrategy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/retry/NonIdempotentRequestRetryStrategy.java new file mode 100644 index 000000000..db4740700 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/retry/NonIdempotentRequestRetryStrategy.java @@ -0,0 +1,69 @@ +package com.databricks.sdk.core.retry; + +import com.databricks.sdk.core.DatabricksError; +import java.net.*; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * This class is used to determine if a non-idempotent request should be retried. We essentially + * want to ensure that any request that could have potentially been processed by the server is not + * retried. 
+ */ +public class NonIdempotentRequestRetryStrategy implements RetryStrategy { + private final Logger LOGGER = LoggerFactory.getLogger(getClass().getName()); + + private static final List> RETRIABLE_CLASSES = + Arrays.asList( + ConnectException.class, + UnknownHostException.class, + NoRouteToHostException.class, + PortUnreachableException.class); + + private static final Set RETRIABLE_HTTP_CODES = + new HashSet<>( + Arrays.asList( + /* Too many requests */ 429, /* Request not processed by server */ 501, 503)); + + @Override + public boolean isRetriable(DatabricksError databricksError) { + if (RetryUtils.isCausedByTransientError(databricksError)) { + return true; + } + if (isClientSideException(databricksError)) { + return true; + } + if (isRetriableHttpErrorCode(databricksError)) { + return true; + } + return false; + } + + private boolean isRetriableHttpErrorCode(DatabricksError databricksError) { + return RETRIABLE_HTTP_CODES.contains(databricksError.getStatusCode()); + } + + private boolean isClientSideException(DatabricksError error) { + for (Class clazz : RETRIABLE_CLASSES) { + if (isCausedBy(error.getCause(), clazz)) { + LOGGER.debug("Attempting retry because cause or nested cause extends {}", clazz.getName()); + return true; + } + } + return false; + } + + private static boolean isCausedBy(Throwable throwable, Class clazz) { + if (throwable == null) { + return false; + } + if (clazz.isInstance(throwable)) { + return true; + } + return isCausedBy(throwable.getCause(), clazz); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/retry/RequestBasedRetryStrategyPicker.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/retry/RequestBasedRetryStrategyPicker.java new file mode 100644 index 000000000..8b4105c05 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/retry/RequestBasedRetryStrategyPicker.java @@ -0,0 +1,83 @@ +package com.databricks.sdk.core.retry; + +import 
com.databricks.sdk.core.DatabricksConfig; +import com.databricks.sdk.core.http.Request; +import java.util.AbstractMap; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.regex.Pattern; +import java.util.stream.Collectors; + +/** + * A RetryStrategyPicker that selects a retry strategy based on whether the request is idempotent or + * not. + */ +public class RequestBasedRetryStrategyPicker implements RetryStrategyPicker { + private static final List IDEMPOTENT_REQUESTS = + Arrays.asList( + // Create a new session v1.0 + new Request("POST", "/api/2.0/sql/statements/sessions/"), + // Create a new session v2.0 + new Request("POST", "/api/2.0/sql/sessions/"), + // Delete an existing session v1.0 + new Request("DELETE", "/api/2.0/sql/statements/sessions/.*"), + // Delete an existing session v2.0 + new Request("DELETE", "/api/2.0/sql/sessions/.*"), + // Get status of a statement + new Request("GET", "/api/2.0/sql/statements/.*"), + // Close a statement + new Request("DELETE", "/api/2.0/sql/statements/.*"), + // Fetch a chunk of a statement result + new Request("GET", "/api/2.0/sql/statements/.*/result/chunks/.*")); + + private final List> idempotentRequestsPattern; + private static final NonIdempotentRequestRetryStrategy NON_IDEMPOTENT_RETRY_STRATEGY = + new NonIdempotentRequestRetryStrategy(); + private static final IdempotentRequestRetryStrategy IDEMPOTENT_RETRY_STRATEGY = + new IdempotentRequestRetryStrategy(); + + public RequestBasedRetryStrategyPicker(DatabricksConfig config) { + this.idempotentRequestsPattern = + IDEMPOTENT_REQUESTS.stream() + .map( + request -> + new AbstractMap.SimpleEntry<>( + request.getMethod(), + Pattern.compile( + config.getHost() + request.getUrl(), Pattern.CASE_INSENSITIVE))) + .collect(Collectors.toList()); + } + + /** + * This function gets the retry strategy for a given request based on whether the request is + * idempotent or not. 
+ * + * @param request to get the retry strategy for + * @return the retry strategy for the given request + */ + @Override + public RetryStrategy getRetryStrategy(Request request) { + if (isIdempotentRequest(request)) { + return IDEMPOTENT_RETRY_STRATEGY; + } else { + return NON_IDEMPOTENT_RETRY_STRATEGY; + } + } + + /** + * This function checks if a given request is idempotent. + * + * @param request to check if it is idempotent + * @return true if the request is idempotent, false otherwise + */ + private boolean isIdempotentRequest(Request request) { + for (Map.Entry idempotentRequest : idempotentRequestsPattern) { + if (idempotentRequest.getKey().equals(request.getMethod()) + && idempotentRequest.getValue().matcher(request.getUrl()).find()) { + return true; + } + } + return false; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/retry/RetryStrategy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/retry/RetryStrategy.java new file mode 100644 index 000000000..0c80421f6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/retry/RetryStrategy.java @@ -0,0 +1,14 @@ +package com.databricks.sdk.core.retry; + +import com.databricks.sdk.core.DatabricksError; + +/** This interface is used to determine if a request should be retried. */ +public interface RetryStrategy { + /** + * This method is used to determine if a request should be retried. + * + * @param databricksError The DatabricksError wrapped response/error object. + * @return true if the request should be retried, false otherwise. 
+ */ + boolean isRetriable(DatabricksError databricksError); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/retry/RetryStrategyPicker.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/retry/RetryStrategyPicker.java new file mode 100644 index 000000000..9b3cfaf27 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/retry/RetryStrategyPicker.java @@ -0,0 +1,14 @@ +package com.databricks.sdk.core.retry; + +import com.databricks.sdk.core.http.Request; + +/** This interface is used to pick the appropriate retry strategy for a given request. */ +public interface RetryStrategyPicker { + /** + * This method is used to get the retry strategy for a given request. + * + * @param request The request for which the retry strategy is needed. + * @return The retry strategy for the given request. + */ + RetryStrategy getRetryStrategy(Request request); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/retry/RetryUtils.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/retry/RetryUtils.java new file mode 100644 index 000000000..ce7358a9e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/retry/RetryUtils.java @@ -0,0 +1,25 @@ +package com.databricks.sdk.core.retry; + +import com.databricks.sdk.core.DatabricksError; +import java.util.Arrays; +import java.util.List; + +public class RetryUtils { + private static final List TRANSIENT_ERROR_STRING_MATCHES = + Arrays.asList( + "com.databricks.backend.manager.util.UnknownWorkerEnvironmentException", + "does not have any associated worker environments", + "There is no worker environment with id", + "Unknown worker environment", + "ClusterNotReadyException"); + + public static boolean isCausedByTransientError(DatabricksError databricksError) { + String message = databricksError.getMessage(); + for (String match : TRANSIENT_ERROR_STRING_MATCHES) { + if (message != null && message.contains(match)) { + return true; 
+ } + } + return false; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/utils/ProxyUtils.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/utils/ProxyUtils.java new file mode 100644 index 000000000..57da54273 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/utils/ProxyUtils.java @@ -0,0 +1,143 @@ +package com.databricks.sdk.core.utils; + +import com.databricks.sdk.core.DatabricksException; +import com.databricks.sdk.core.ProxyConfig; +import java.security.Principal; +import org.apache.http.HttpHost; +import org.apache.http.auth.AuthSchemeProvider; +import org.apache.http.auth.AuthScope; +import org.apache.http.auth.Credentials; +import org.apache.http.auth.UsernamePasswordCredentials; +import org.apache.http.client.CredentialsProvider; +import org.apache.http.client.config.AuthSchemes; +import org.apache.http.config.RegistryBuilder; +import org.apache.http.impl.auth.SPNegoSchemeFactory; +import org.apache.http.impl.client.BasicCredentialsProvider; +import org.apache.http.impl.client.HttpClientBuilder; +import org.apache.http.impl.client.ProxyAuthenticationStrategy; + +/** + * This class is used to setup the proxy configs for the http client. This includes setting up the + * proxy host, port, and authentication. + */ +public class ProxyUtils { + + /** + * Setup the proxy configuration in the http client builder. + * + * @param config the proxy configuration + * @param builder the http client builder + */ + public static void setupProxy(ProxyConfig config, HttpClientBuilder builder) { + String proxyHost = null; + Integer proxyPort = null; + String proxyUser = null; + String proxyPassword = null; + if (config.getUseSystemProperties() != null && config.getUseSystemProperties()) { + builder.useSystemProperties(); + String protocol = System.getProperty("https.proxyHost") != null ? 
"https" : "http"; + proxyHost = System.getProperty(protocol + ".proxyHost"); + proxyPort = Integer.parseInt(System.getProperty(protocol + ".proxyPort")); + proxyUser = System.getProperty(protocol + ".proxyUser"); + proxyPassword = System.getProperty(protocol + ".proxyPassword"); + } + // Override system properties if proxy configuration is explicitly set + if (config.getHost() != null) { + proxyHost = config.getHost(); + proxyPort = config.getPort(); + proxyUser = config.getUsername(); + proxyPassword = config.getPassword(); + builder.setProxy(new HttpHost(proxyHost, proxyPort)); + } + setupProxyAuth( + proxyHost, proxyPort, config.getProxyAuthType(), proxyUser, proxyPassword, builder); + } + + /** + * This method sets up the proxy authentication in the http client builder. + * + * @param proxyHost the proxy host + * @param proxyPort the proxy port + * @param proxyAuthType the proxy authentication type + * @param proxyUser the proxy user + * @param proxyPassword the proxy password + * @param builder the http client builder + */ + public static void setupProxyAuth( + String proxyHost, + Integer proxyPort, + ProxyConfig.ProxyAuthType proxyAuthType, + String proxyUser, + String proxyPassword, + HttpClientBuilder builder) { + if (proxyAuthType == null) { + return; + } + AuthScope authScope = new AuthScope(proxyHost, proxyPort); + switch (proxyAuthType) { + case NONE: + break; + case BASIC: + setupBasicProxyAuth(builder, authScope, proxyUser, proxyPassword); + break; + case SPNEGO: + setupNegotiateProxyAuth(builder, authScope); + break; + default: + throw new DatabricksException("Unknown proxy auth type: " + proxyAuthType); + } + } + + /** + * This method sets up the proxy authentication using the negotiate mechanism in the http client + * builder. 
+ * + * @param builder the http client builder + * @param authScope the authentication scope + */ + public static void setupNegotiateProxyAuth(HttpClientBuilder builder, AuthScope authScope) { + // We only support kerberos for negotiate as of now + System.setProperty("javax.security.auth.useSubjectCredsOnly", "false"); + // "java.security.krb5.conf" system property needs to be set if krb5.conf is not in the default + // location + // Use "sun.security.krb5.debug" and "sun.security.jgss.debug" system properties for debugging + Credentials useJaasCreds = + new Credentials() { + public String getPassword() { + return null; + } + + public Principal getUserPrincipal() { + return null; + } + }; + + CredentialsProvider credsProvider = new BasicCredentialsProvider(); + credsProvider.setCredentials(authScope, useJaasCreds); + builder + .setDefaultCredentialsProvider(credsProvider) + .setDefaultAuthSchemeRegistry( + RegistryBuilder.create() + .register(AuthSchemes.SPNEGO, new SPNegoSchemeFactory(true)) + .build()); + } + + /** + * This method sets up the proxy authentication using the basic mechanism credentials provided + * into the http client builder. 
+ * + * @param builder the http client builder + * @param authScope the authentication scope + * @param proxyUser the proxy user + * @param proxyPassword the proxy password + */ + public static void setupBasicProxyAuth( + HttpClientBuilder builder, AuthScope authScope, String proxyUser, String proxyPassword) { + CredentialsProvider credsProvider = new BasicCredentialsProvider(); + credsProvider.setCredentials( + authScope, new UsernamePasswordCredentials(proxyUser, proxyPassword)); + builder + .setDefaultCredentialsProvider(credsProvider) + .setProxyAuthenticationStrategy(new ProxyAuthenticationStrategy()); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/utils/SystemTimer.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/utils/SystemTimer.java index 9187d18c6..728df59e4 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/utils/SystemTimer.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/utils/SystemTimer.java @@ -2,7 +2,7 @@ public class SystemTimer implements Timer { @Override - public void wait(int milliseconds) throws InterruptedException { + public void sleep(long milliseconds) throws InterruptedException { Thread.sleep(milliseconds); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/utils/Timer.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/utils/Timer.java index d2a5c97bf..7e95872fa 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/core/utils/Timer.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/core/utils/Timer.java @@ -1,7 +1,7 @@ package com.databricks.sdk.core.utils; public interface Timer { - void wait(int milliseconds) throws InterruptedException; + void sleep(long milliseconds) throws InterruptedException; long getCurrentTime(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetsAPI.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetsAPI.java index 70add4e35..4de8a1314 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/BudgetsAPI.java @@ -3,6 +3,7 @@ import com.databricks.sdk.core.ApiClient; import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -73,7 +74,7 @@ public WrappedBudgetWithStatus get(GetBudgetRequest request) { * that the budget is configured to include. */ public Iterable list() { - return impl.list().getBudgets(); + return new Paginator<>(null, (Void v) -> impl.list(), BudgetList::getBudgets, response -> null); } public void update(String budgetId, Budget budget) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryAPI.java index 574be8ed5..d00bca784 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/LogDeliveryAPI.java @@ -3,6 +3,7 @@ import com.databricks.sdk.core.ApiClient; import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -128,7 +129,11 @@ public WrappedLogDeliveryConfiguration get(GetLogDeliveryRequest request) { *

Gets all Databricks log delivery configurations associated with an account specified by ID. */ public Iterable list(ListLogDeliveryRequest request) { - return impl.list(request).getLogDeliveryConfigurations(); + return new Paginator<>( + request, + impl::list, + WrappedLogDeliveryConfigurations::getLogDeliveryConfigurations, + response -> null); } public void patchStatus(String logDeliveryConfigurationId, LogDeliveryConfigStatus status) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsAPI.java index 502048323..68a2c8e08 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsAPI.java @@ -3,6 +3,7 @@ import com.databricks.sdk.core.ApiClient; import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -80,7 +81,11 @@ public Iterable list(String metastoreId) { *

Gets a list of all Databricks workspace IDs that have been assigned to given metastore. */ public Iterable list(ListAccountMetastoreAssignmentsRequest request) { - return impl.list(request).getWorkspaceIds(); + return new Paginator<>( + request, + impl::list, + ListAccountMetastoreAssignmentsResponse::getWorkspaceIds, + response -> null); } public void update(long workspaceId, String metastoreId) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoresAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoresAPI.java index bc97167b9..3ed7e76b4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoresAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoresAPI.java @@ -3,6 +3,7 @@ import com.databricks.sdk.core.ApiClient; import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -67,7 +68,8 @@ public AccountsMetastoreInfo get(GetAccountMetastoreRequest request) { *

Gets all Unity Catalog metastores associated with an account specified by ID. */ public Iterable list() { - return impl.list().getMetastores(); + return new Paginator<>( + null, (Void v) -> impl.list(), ListMetastoresResponse::getMetastores, response -> null); } public AccountsMetastoreInfo update(String metastoreId) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRoleRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRoleRequest.java new file mode 100755 index 000000000..2eac19555 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRoleRequest.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class AwsIamRoleRequest { + /** The Amazon Resource Name (ARN) of the AWS IAM role for S3 data access. 
*/ + @JsonProperty("role_arn") + private String roleArn; + + public AwsIamRoleRequest setRoleArn(String roleArn) { + this.roleArn = roleArn; + return this; + } + + public String getRoleArn() { + return roleArn; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AwsIamRoleRequest that = (AwsIamRoleRequest) o; + return Objects.equals(roleArn, that.roleArn); + } + + @Override + public int hashCode() { + return Objects.hash(roleArn); + } + + @Override + public String toString() { + return new ToStringer(AwsIamRoleRequest.class).add("roleArn", roleArn).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRole.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRoleResponse.java similarity index 84% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRole.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRoleResponse.java index 8b07a85cb..50420d04a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRole.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRoleResponse.java @@ -8,7 +8,7 @@ import java.util.Objects; @Generated -public class AwsIamRole { +public class AwsIamRoleResponse { /** The external ID used in role assumption to prevent confused deputy problem.. 
*/ @JsonProperty("external_id") private String externalId; @@ -24,7 +24,7 @@ public class AwsIamRole { @JsonProperty("unity_catalog_iam_arn") private String unityCatalogIamArn; - public AwsIamRole setExternalId(String externalId) { + public AwsIamRoleResponse setExternalId(String externalId) { this.externalId = externalId; return this; } @@ -33,7 +33,7 @@ public String getExternalId() { return externalId; } - public AwsIamRole setRoleArn(String roleArn) { + public AwsIamRoleResponse setRoleArn(String roleArn) { this.roleArn = roleArn; return this; } @@ -42,7 +42,7 @@ public String getRoleArn() { return roleArn; } - public AwsIamRole setUnityCatalogIamArn(String unityCatalogIamArn) { + public AwsIamRoleResponse setUnityCatalogIamArn(String unityCatalogIamArn) { this.unityCatalogIamArn = unityCatalogIamArn; return this; } @@ -55,7 +55,7 @@ public String getUnityCatalogIamArn() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - AwsIamRole that = (AwsIamRole) o; + AwsIamRoleResponse that = (AwsIamRoleResponse) o; return Objects.equals(externalId, that.externalId) && Objects.equals(roleArn, that.roleArn) && Objects.equals(unityCatalogIamArn, that.unityCatalogIamArn); @@ -68,7 +68,7 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(AwsIamRole.class) + return new ToStringer(AwsIamRoleResponse.class) .add("externalId", externalId) .add("roleArn", roleArn) .add("unityCatalogIamArn", unityCatalogIamArn) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentityRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentityRequest.java new file mode 100755 index 000000000..36122a638 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentityRequest.java @@ -0,0 +1,68 @@ +// Code generated from OpenAPI specs by Databricks SDK 
Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class AzureManagedIdentityRequest { + /** + * The Azure resource ID of the Azure Databricks Access Connector. Use the format + * /subscriptions/{guid}/resourceGroups/{rg-name}/providers/Microsoft.Databricks/accessConnectors/{connector-name}. + */ + @JsonProperty("access_connector_id") + private String accessConnectorId; + + /** + * The Azure resource ID of the managed identity. Use the format + * /subscriptions/{guid}/resourceGroups/{rg-name}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identity-name}. + * This is only available for user-assgined identities. For system-assigned identities, the + * access_connector_id is used to identify the identity. If this field is not provided, then we + * assume the AzureManagedIdentity is for a system-assigned identity. 
+ */ + @JsonProperty("managed_identity_id") + private String managedIdentityId; + + public AzureManagedIdentityRequest setAccessConnectorId(String accessConnectorId) { + this.accessConnectorId = accessConnectorId; + return this; + } + + public String getAccessConnectorId() { + return accessConnectorId; + } + + public AzureManagedIdentityRequest setManagedIdentityId(String managedIdentityId) { + this.managedIdentityId = managedIdentityId; + return this; + } + + public String getManagedIdentityId() { + return managedIdentityId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AzureManagedIdentityRequest that = (AzureManagedIdentityRequest) o; + return Objects.equals(accessConnectorId, that.accessConnectorId) + && Objects.equals(managedIdentityId, that.managedIdentityId); + } + + @Override + public int hashCode() { + return Objects.hash(accessConnectorId, managedIdentityId); + } + + @Override + public String toString() { + return new ToStringer(AzureManagedIdentityRequest.class) + .add("accessConnectorId", accessConnectorId) + .add("managedIdentityId", managedIdentityId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentity.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentityResponse.java similarity index 84% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentity.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentityResponse.java index 2ea7f739c..91fbf7d9c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentity.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentityResponse.java @@ -8,7 +8,7 @@ import java.util.Objects; @Generated -public class AzureManagedIdentity { 
+public class AzureManagedIdentityResponse { /** * The Azure resource ID of the Azure Databricks Access Connector. Use the format * /subscriptions/{guid}/resourceGroups/{rg-name}/providers/Microsoft.Databricks/accessConnectors/{connector-name}. @@ -30,7 +30,7 @@ public class AzureManagedIdentity { @JsonProperty("managed_identity_id") private String managedIdentityId; - public AzureManagedIdentity setAccessConnectorId(String accessConnectorId) { + public AzureManagedIdentityResponse setAccessConnectorId(String accessConnectorId) { this.accessConnectorId = accessConnectorId; return this; } @@ -39,7 +39,7 @@ public String getAccessConnectorId() { return accessConnectorId; } - public AzureManagedIdentity setCredentialId(String credentialId) { + public AzureManagedIdentityResponse setCredentialId(String credentialId) { this.credentialId = credentialId; return this; } @@ -48,7 +48,7 @@ public String getCredentialId() { return credentialId; } - public AzureManagedIdentity setManagedIdentityId(String managedIdentityId) { + public AzureManagedIdentityResponse setManagedIdentityId(String managedIdentityId) { this.managedIdentityId = managedIdentityId; return this; } @@ -61,7 +61,7 @@ public String getManagedIdentityId() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - AzureManagedIdentity that = (AzureManagedIdentity) o; + AzureManagedIdentityResponse that = (AzureManagedIdentityResponse) o; return Objects.equals(accessConnectorId, that.accessConnectorId) && Objects.equals(credentialId, that.credentialId) && Objects.equals(managedIdentityId, that.managedIdentityId); @@ -74,7 +74,7 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(AzureManagedIdentity.class) + return new ToStringer(AzureManagedIdentityResponse.class) .add("accessConnectorId", accessConnectorId) .add("credentialId", credentialId) .add("managedIdentityId", managedIdentityId) diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CancelRefreshRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CancelRefreshRequest.java index 308993366..edaee94b0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CancelRefreshRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CancelRefreshRequest.java @@ -9,20 +9,11 @@ /** Cancel refresh */ @Generated public class CancelRefreshRequest { - /** Full name of the table. */ - private String fullName; - /** ID of the refresh. */ private String refreshId; - public CancelRefreshRequest setFullName(String fullName) { - this.fullName = fullName; - return this; - } - - public String getFullName() { - return fullName; - } + /** Full name of the table. */ + private String tableName; public CancelRefreshRequest setRefreshId(String refreshId) { this.refreshId = refreshId; @@ -33,24 +24,33 @@ public String getRefreshId() { return refreshId; } + public CancelRefreshRequest setTableName(String tableName) { + this.tableName = tableName; + return this; + } + + public String getTableName() { + return tableName; + } + @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; CancelRefreshRequest that = (CancelRefreshRequest) o; - return Objects.equals(fullName, that.fullName) && Objects.equals(refreshId, that.refreshId); + return Objects.equals(refreshId, that.refreshId) && Objects.equals(tableName, that.tableName); } @Override public int hashCode() { - return Objects.hash(fullName, refreshId); + return Objects.hash(refreshId, tableName); } @Override public String toString() { return new ToStringer(CancelRefreshRequest.class) - .add("fullName", fullName) .add("refreshId", refreshId) + .add("tableName", tableName) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogInfo.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogInfo.java index 313f31c88..c0cec6fcf 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogInfo.java @@ -10,7 +10,10 @@ @Generated public class CatalogInfo { - /** Indicate whether or not the catalog info contains only browsable metadata. */ + /** + * Indicates whether the principal is limited to retrieving metadata for the associated object + * through the BROWSE privilege when include_browse is enabled in the request. + */ @JsonProperty("browse_only") private Boolean browseOnly; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsAPI.java index d9bae3a42..6b3ea1712 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsAPI.java @@ -3,6 +3,7 @@ import com.databricks.sdk.core.ApiClient; import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -81,8 +82,9 @@ public CatalogInfo get(GetCatalogRequest request) { * caller has the **USE_CATALOG** privilege) will be retrieved. There is no guarantee of a * specific ordering of the elements in the array. 
*/ - public Iterable list() { - return impl.list().getCatalogs(); + public Iterable list(ListCatalogsRequest request) { + return new Paginator<>( + request, impl::list, ListCatalogsResponse::getCatalogs, response -> null); } public CatalogInfo update(String name) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsImpl.java index d0be97ff1..404904ace 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsImpl.java @@ -41,11 +41,11 @@ public CatalogInfo get(GetCatalogRequest request) { } @Override - public ListCatalogsResponse list() { + public ListCatalogsResponse list(ListCatalogsRequest request) { String path = "/api/2.1/unity-catalog/catalogs"; Map headers = new HashMap<>(); headers.put("Accept", "application/json"); - return apiClient.GET(path, ListCatalogsResponse.class, headers); + return apiClient.GET(path, request, ListCatalogsResponse.class, headers); } @Override diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsService.java index 9047ff738..86f3a6b7d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogsService.java @@ -50,7 +50,7 @@ public interface CatalogsService { * caller has the **USE_CATALOG** privilege) will be retrieved. There is no guarantee of a * specific ordering of the elements in the array. */ - ListCatalogsResponse list(); + ListCatalogsResponse list(ListCatalogsRequest listCatalogsRequest); /** * Update a catalog. 
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsAPI.java index eaa07f9fd..04c6eab69 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsAPI.java @@ -3,6 +3,7 @@ import com.databricks.sdk.core.ApiClient; import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; import java.util.Map; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -84,7 +85,8 @@ public ConnectionInfo get(GetConnectionRequest request) { *

List all connections. */ public Iterable list() { - return impl.list().getConnections(); + return new Paginator<>( + null, (Void v) -> impl.list(), ListConnectionsResponse::getConnections, response -> null); } public ConnectionInfo update(String name, Map options) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMonitor.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMonitor.java index 0beb7de43..d7aa83947 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMonitor.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateMonitor.java @@ -27,22 +27,19 @@ public class CreateMonitor { * time windows). */ @JsonProperty("custom_metrics") - private Collection customMetrics; + private Collection customMetrics; /** The data classification config for the monitor. */ @JsonProperty("data_classification_config") private MonitorDataClassificationConfig dataClassificationConfig; - /** Full name of the table. */ - private String fullName; - /** Configuration for monitoring inference logs. */ @JsonProperty("inference_log") - private MonitorInferenceLogProfileType inferenceLog; + private MonitorInferenceLog inferenceLog; /** The notification settings for the monitor. */ @JsonProperty("notifications") - private Collection notifications; + private MonitorNotifications notifications; /** Schema where output metric tables are created. */ @JsonProperty("output_schema_name") @@ -67,11 +64,14 @@ public class CreateMonitor { /** Configuration for monitoring snapshot tables. */ @JsonProperty("snapshot") - private MonitorSnapshotProfileType snapshot; + private MonitorSnapshot snapshot; + + /** Full name of the table. */ + private String tableName; /** Configuration for monitoring time series tables. 
*/ @JsonProperty("time_series") - private MonitorTimeSeriesProfileType timeSeries; + private MonitorTimeSeries timeSeries; /** * Optional argument to specify the warehouse for dashboard creation. If not specified, the first @@ -98,12 +98,12 @@ public String getBaselineTableName() { return baselineTableName; } - public CreateMonitor setCustomMetrics(Collection customMetrics) { + public CreateMonitor setCustomMetrics(Collection customMetrics) { this.customMetrics = customMetrics; return this; } - public Collection getCustomMetrics() { + public Collection getCustomMetrics() { return customMetrics; } @@ -117,30 +117,21 @@ public MonitorDataClassificationConfig getDataClassificationConfig() { return dataClassificationConfig; } - public CreateMonitor setFullName(String fullName) { - this.fullName = fullName; - return this; - } - - public String getFullName() { - return fullName; - } - - public CreateMonitor setInferenceLog(MonitorInferenceLogProfileType inferenceLog) { + public CreateMonitor setInferenceLog(MonitorInferenceLog inferenceLog) { this.inferenceLog = inferenceLog; return this; } - public MonitorInferenceLogProfileType getInferenceLog() { + public MonitorInferenceLog getInferenceLog() { return inferenceLog; } - public CreateMonitor setNotifications(Collection notifications) { + public CreateMonitor setNotifications(MonitorNotifications notifications) { this.notifications = notifications; return this; } - public Collection getNotifications() { + public MonitorNotifications getNotifications() { return notifications; } @@ -180,21 +171,30 @@ public Collection getSlicingExprs() { return slicingExprs; } - public CreateMonitor setSnapshot(MonitorSnapshotProfileType snapshot) { + public CreateMonitor setSnapshot(MonitorSnapshot snapshot) { this.snapshot = snapshot; return this; } - public MonitorSnapshotProfileType getSnapshot() { + public MonitorSnapshot getSnapshot() { return snapshot; } - public CreateMonitor setTimeSeries(MonitorTimeSeriesProfileType timeSeries) { 
+ public CreateMonitor setTableName(String tableName) { + this.tableName = tableName; + return this; + } + + public String getTableName() { + return tableName; + } + + public CreateMonitor setTimeSeries(MonitorTimeSeries timeSeries) { this.timeSeries = timeSeries; return this; } - public MonitorTimeSeriesProfileType getTimeSeries() { + public MonitorTimeSeries getTimeSeries() { return timeSeries; } @@ -216,7 +216,6 @@ public boolean equals(Object o) { && Objects.equals(baselineTableName, that.baselineTableName) && Objects.equals(customMetrics, that.customMetrics) && Objects.equals(dataClassificationConfig, that.dataClassificationConfig) - && Objects.equals(fullName, that.fullName) && Objects.equals(inferenceLog, that.inferenceLog) && Objects.equals(notifications, that.notifications) && Objects.equals(outputSchemaName, that.outputSchemaName) @@ -224,6 +223,7 @@ public boolean equals(Object o) { && Objects.equals(skipBuiltinDashboard, that.skipBuiltinDashboard) && Objects.equals(slicingExprs, that.slicingExprs) && Objects.equals(snapshot, that.snapshot) + && Objects.equals(tableName, that.tableName) && Objects.equals(timeSeries, that.timeSeries) && Objects.equals(warehouseId, that.warehouseId); } @@ -235,7 +235,6 @@ public int hashCode() { baselineTableName, customMetrics, dataClassificationConfig, - fullName, inferenceLog, notifications, outputSchemaName, @@ -243,6 +242,7 @@ public int hashCode() { skipBuiltinDashboard, slicingExprs, snapshot, + tableName, timeSeries, warehouseId); } @@ -254,7 +254,6 @@ public String toString() { .add("baselineTableName", baselineTableName) .add("customMetrics", customMetrics) .add("dataClassificationConfig", dataClassificationConfig) - .add("fullName", fullName) .add("inferenceLog", inferenceLog) .add("notifications", notifications) .add("outputSchemaName", outputSchemaName) @@ -262,6 +261,7 @@ public String toString() { .add("skipBuiltinDashboard", skipBuiltinDashboard) .add("slicingExprs", slicingExprs) .add("snapshot", snapshot) 
+ .add("tableName", tableName) .add("timeSeries", timeSeries) .add("warehouseId", warehouseId) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ViewData.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateOnlineTableRequest.java similarity index 75% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ViewData.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateOnlineTableRequest.java index 9c78e4656..4b77e9b13 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ViewData.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateOnlineTableRequest.java @@ -9,7 +9,7 @@ /** Online Table information. */ @Generated -public class ViewData { +public class CreateOnlineTableRequest { /** Full three-part (catalog, schema, table) name of the table. */ @JsonProperty("name") private String name; @@ -18,7 +18,7 @@ public class ViewData { @JsonProperty("spec") private OnlineTableSpec spec; - public ViewData setName(String name) { + public CreateOnlineTableRequest setName(String name) { this.name = name; return this; } @@ -27,7 +27,7 @@ public String getName() { return name; } - public ViewData setSpec(OnlineTableSpec spec) { + public CreateOnlineTableRequest setSpec(OnlineTableSpec spec) { this.spec = spec; return this; } @@ -40,7 +40,7 @@ public OnlineTableSpec getSpec() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - ViewData that = (ViewData) o; + CreateOnlineTableRequest that = (CreateOnlineTableRequest) o; return Objects.equals(name, that.name) && Objects.equals(spec, that.spec); } @@ -51,6 +51,9 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(ViewData.class).add("name", name).add("spec", spec).toString(); + return new ToStringer(CreateOnlineTableRequest.class) + 
.add("name", name) + .add("spec", spec) + .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateStorageCredential.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateStorageCredential.java index 0054ad672..23717f93d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateStorageCredential.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateStorageCredential.java @@ -11,11 +11,11 @@ public class CreateStorageCredential { /** The AWS IAM role configuration. */ @JsonProperty("aws_iam_role") - private AwsIamRole awsIamRole; + private AwsIamRoleRequest awsIamRole; /** The Azure managed identity configuration. */ @JsonProperty("azure_managed_identity") - private AzureManagedIdentity azureManagedIdentity; + private AzureManagedIdentityRequest azureManagedIdentity; /** The Azure service principal configuration. */ @JsonProperty("azure_service_principal") @@ -45,22 +45,22 @@ public class CreateStorageCredential { @JsonProperty("skip_validation") private Boolean skipValidation; - public CreateStorageCredential setAwsIamRole(AwsIamRole awsIamRole) { + public CreateStorageCredential setAwsIamRole(AwsIamRoleRequest awsIamRole) { this.awsIamRole = awsIamRole; return this; } - public AwsIamRole getAwsIamRole() { + public AwsIamRoleRequest getAwsIamRole() { return awsIamRole; } public CreateStorageCredential setAzureManagedIdentity( - AzureManagedIdentity azureManagedIdentity) { + AzureManagedIdentityRequest azureManagedIdentity) { this.azureManagedIdentity = azureManagedIdentity; return this; } - public AzureManagedIdentity getAzureManagedIdentity() { + public AzureManagedIdentityRequest getAzureManagedIdentity() { return azureManagedIdentity; } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteLakehouseMonitorRequest.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteLakehouseMonitorRequest.java index ecd871b14..00de48b2f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteLakehouseMonitorRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeleteLakehouseMonitorRequest.java @@ -10,15 +10,15 @@ @Generated public class DeleteLakehouseMonitorRequest { /** Full name of the table. */ - private String fullName; + private String tableName; - public DeleteLakehouseMonitorRequest setFullName(String fullName) { - this.fullName = fullName; + public DeleteLakehouseMonitorRequest setTableName(String tableName) { + this.tableName = tableName; return this; } - public String getFullName() { - return fullName; + public String getTableName() { + return tableName; } @Override @@ -26,16 +26,18 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; DeleteLakehouseMonitorRequest that = (DeleteLakehouseMonitorRequest) o; - return Objects.equals(fullName, that.fullName); + return Objects.equals(tableName, that.tableName); } @Override public int hashCode() { - return Objects.hash(fullName); + return Objects.hash(tableName); } @Override public String toString() { - return new ToStringer(DeleteLakehouseMonitorRequest.class).add("fullName", fullName).toString(); + return new ToStringer(DeleteLakehouseMonitorRequest.class) + .add("tableName", tableName) + .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationInfo.java index 801739d2c..ef3d12232 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationInfo.java @@ -13,6 +13,13 @@ public class 
ExternalLocationInfo { @JsonProperty("access_point") private String accessPoint; + /** + * Indicates whether the principal is limited to retrieving metadata for the associated object + * through the BROWSE privilege when include_browse is enabled in the request. + */ + @JsonProperty("browse_only") + private Boolean browseOnly; + /** User-provided free-form text description. */ @JsonProperty("comment") private String comment; @@ -74,6 +81,15 @@ public String getAccessPoint() { return accessPoint; } + public ExternalLocationInfo setBrowseOnly(Boolean browseOnly) { + this.browseOnly = browseOnly; + return this; + } + + public Boolean getBrowseOnly() { + return browseOnly; + } + public ExternalLocationInfo setComment(String comment) { this.comment = comment; return this; @@ -197,6 +213,7 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; ExternalLocationInfo that = (ExternalLocationInfo) o; return Objects.equals(accessPoint, that.accessPoint) + && Objects.equals(browseOnly, that.browseOnly) && Objects.equals(comment, that.comment) && Objects.equals(createdAt, that.createdAt) && Objects.equals(createdBy, that.createdBy) @@ -216,6 +233,7 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash( accessPoint, + browseOnly, comment, createdAt, createdBy, @@ -235,6 +253,7 @@ public int hashCode() { public String toString() { return new ToStringer(ExternalLocationInfo.class) .add("accessPoint", accessPoint) + .add("browseOnly", browseOnly) .add("comment", comment) .add("createdAt", createdAt) .add("createdBy", createdBy) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsAPI.java index d9759ac31..361875fbe 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsAPI.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsAPI.java @@ -84,9 +84,8 @@ public ExternalLocationInfo get(GetExternalLocationRequest request) { * *

Gets an array of external locations (__ExternalLocationInfo__ objects) from the metastore. * The caller must be a metastore admin, the owner of the external location, or a user that has - * some privilege on the external location. For unpaginated request, there is no guarantee of a - * specific ordering of the elements in the array. For paginated request, elements are ordered by - * their name. + * some privilege on the external location. There is no guarantee of a specific ordering of the + * elements in the array. */ public Iterable list(ListExternalLocationsRequest request) { return new Paginator<>( diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsService.java index 0cefbac94..ec6a4d48d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ExternalLocationsService.java @@ -51,9 +51,8 @@ public interface ExternalLocationsService { * *

Gets an array of external locations (__ExternalLocationInfo__ objects) from the metastore. * The caller must be a metastore admin, the owner of the external location, or a user that has - * some privilege on the external location. For unpaginated request, there is no guarantee of a - * specific ordering of the elements in the array. For paginated request, elements are ordered by - * their name. + * some privilege on the external location. There is no guarantee of a specific ordering of the + * elements in the array. */ ListExternalLocationsResponse list(ListExternalLocationsRequest listExternalLocationsRequest); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfo.java index fe25a1bcf..02b5d835d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionInfo.java @@ -9,6 +9,13 @@ @Generated public class FunctionInfo { + /** + * Indicates whether the principal is limited to retrieving metadata for the associated object + * through the BROWSE privilege when include_browse is enabled in the request. + */ + @JsonProperty("browse_only") + private Boolean browseOnly; + /** Name of parent catalog. 
*/ @JsonProperty("catalog_name") private String catalogName; @@ -130,6 +137,15 @@ public class FunctionInfo { @JsonProperty("updated_by") private String updatedBy; + public FunctionInfo setBrowseOnly(Boolean browseOnly) { + this.browseOnly = browseOnly; + return this; + } + + public Boolean getBrowseOnly() { + return browseOnly; + } + public FunctionInfo setCatalogName(String catalogName) { this.catalogName = catalogName; return this; @@ -396,7 +412,8 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; FunctionInfo that = (FunctionInfo) o; - return Objects.equals(catalogName, that.catalogName) + return Objects.equals(browseOnly, that.browseOnly) + && Objects.equals(catalogName, that.catalogName) && Objects.equals(comment, that.comment) && Objects.equals(createdAt, that.createdAt) && Objects.equals(createdBy, that.createdBy) @@ -430,6 +447,7 @@ public boolean equals(Object o) { @Override public int hashCode() { return Objects.hash( + browseOnly, catalogName, comment, createdAt, @@ -464,6 +482,7 @@ public int hashCode() { @Override public String toString() { return new ToStringer(FunctionInfo.class) + .add("browseOnly", browseOnly) .add("catalogName", catalogName) .add("comment", comment) .add("createdAt", createdAt) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsAPI.java index e5e01a85e..d4e3e587b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsAPI.java @@ -94,8 +94,8 @@ public Iterable list(String catalogName, String schemaName) { * admin, all functions are returned in the output list. 
Otherwise, the user must have the * **USE_CATALOG** privilege on the catalog and the **USE_SCHEMA** privilege on the schema, and * the output list contains only functions for which either the user has the **EXECUTE** privilege - * or the user is the owner. For unpaginated request, there is no guarantee of a specific ordering - * of the elements in the array. For paginated request, elements are ordered by their name. + * or the user is the owner. There is no guarantee of a specific ordering of the elements in the + * array. */ public Iterable list(ListFunctionsRequest request) { return new Paginator<>( diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsService.java index f891bfd19..c47075d69 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsService.java @@ -58,8 +58,8 @@ public interface FunctionsService { * admin, all functions are returned in the output list. Otherwise, the user must have the * **USE_CATALOG** privilege on the catalog and the **USE_SCHEMA** privilege on the schema, and * the output list contains only functions for which either the user has the **EXECUTE** privilege - * or the user is the owner. For unpaginated request, there is no guarantee of a specific ordering - * of the elements in the array. For paginated request, elements are ordered by their name. + * or the user is the owner. There is no guarantee of a specific ordering of the elements in the + * array. 
*/ ListFunctionsResponse list(ListFunctionsRequest listFunctionsRequest); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCatalogRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCatalogRequest.java index 6c8392670..f70ee0013 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCatalogRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetCatalogRequest.java @@ -3,15 +3,32 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; import java.util.Objects; /** Get a catalog */ @Generated public class GetCatalogRequest { + /** + * Whether to include catalogs in the response for which the principal can only access selective + * metadata for + */ + @QueryParam("include_browse") + private Boolean includeBrowse; + /** The name of the catalog. 
*/ private String name; + public GetCatalogRequest setIncludeBrowse(Boolean includeBrowse) { + this.includeBrowse = includeBrowse; + return this; + } + + public Boolean getIncludeBrowse() { + return includeBrowse; + } + public GetCatalogRequest setName(String name) { this.name = name; return this; @@ -26,16 +43,19 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; GetCatalogRequest that = (GetCatalogRequest) o; - return Objects.equals(name, that.name); + return Objects.equals(includeBrowse, that.includeBrowse) && Objects.equals(name, that.name); } @Override public int hashCode() { - return Objects.hash(name); + return Objects.hash(includeBrowse, name); } @Override public String toString() { - return new ToStringer(GetCatalogRequest.class).add("name", name).toString(); + return new ToStringer(GetCatalogRequest.class) + .add("includeBrowse", includeBrowse) + .add("name", name) + .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetExternalLocationRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetExternalLocationRequest.java index e206ae3f6..45bd02098 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetExternalLocationRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetExternalLocationRequest.java @@ -3,15 +3,32 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; import java.util.Objects; /** Get an external location */ @Generated public class GetExternalLocationRequest { + /** + * Whether to include external locations in the response for which the principal can only access + * selective metadata for + */ + @QueryParam("include_browse") + private Boolean includeBrowse; + /** Name of the external location. 
*/ private String name; + public GetExternalLocationRequest setIncludeBrowse(Boolean includeBrowse) { + this.includeBrowse = includeBrowse; + return this; + } + + public Boolean getIncludeBrowse() { + return includeBrowse; + } + public GetExternalLocationRequest setName(String name) { this.name = name; return this; @@ -26,16 +43,19 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; GetExternalLocationRequest that = (GetExternalLocationRequest) o; - return Objects.equals(name, that.name); + return Objects.equals(includeBrowse, that.includeBrowse) && Objects.equals(name, that.name); } @Override public int hashCode() { - return Objects.hash(name); + return Objects.hash(includeBrowse, name); } @Override public String toString() { - return new ToStringer(GetExternalLocationRequest.class).add("name", name).toString(); + return new ToStringer(GetExternalLocationRequest.class) + .add("includeBrowse", includeBrowse) + .add("name", name) + .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetFunctionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetFunctionRequest.java index d4e0cdc60..24e22c9cc 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetFunctionRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetFunctionRequest.java @@ -3,18 +3,35 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; import java.util.Objects; /** Get a function */ @Generated public class GetFunctionRequest { + /** + * Whether to include functions in the response for which the principal can only access selective + * metadata for + */ + @QueryParam("include_browse") + private Boolean includeBrowse; + /** * The fully-qualified name of the 
function (of the form * __catalog_name__.__schema_name__.__function__name__). */ private String name; + public GetFunctionRequest setIncludeBrowse(Boolean includeBrowse) { + this.includeBrowse = includeBrowse; + return this; + } + + public Boolean getIncludeBrowse() { + return includeBrowse; + } + public GetFunctionRequest setName(String name) { this.name = name; return this; @@ -29,16 +46,19 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; GetFunctionRequest that = (GetFunctionRequest) o; - return Objects.equals(name, that.name); + return Objects.equals(includeBrowse, that.includeBrowse) && Objects.equals(name, that.name); } @Override public int hashCode() { - return Objects.hash(name); + return Objects.hash(includeBrowse, name); } @Override public String toString() { - return new ToStringer(GetFunctionRequest.class).add("name", name).toString(); + return new ToStringer(GetFunctionRequest.class) + .add("includeBrowse", includeBrowse) + .add("name", name) + .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetLakehouseMonitorRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetLakehouseMonitorRequest.java index f03af68cd..b8bd26b3b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetLakehouseMonitorRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetLakehouseMonitorRequest.java @@ -10,15 +10,15 @@ @Generated public class GetLakehouseMonitorRequest { /** Full name of the table. 
*/ - private String fullName; + private String tableName; - public GetLakehouseMonitorRequest setFullName(String fullName) { - this.fullName = fullName; + public GetLakehouseMonitorRequest setTableName(String tableName) { + this.tableName = tableName; return this; } - public String getFullName() { - return fullName; + public String getTableName() { + return tableName; } @Override @@ -26,16 +26,16 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; GetLakehouseMonitorRequest that = (GetLakehouseMonitorRequest) o; - return Objects.equals(fullName, that.fullName); + return Objects.equals(tableName, that.tableName); } @Override public int hashCode() { - return Objects.hash(fullName); + return Objects.hash(tableName); } @Override public String toString() { - return new ToStringer(GetLakehouseMonitorRequest.class).add("fullName", fullName).toString(); + return new ToStringer(GetLakehouseMonitorRequest.class).add("tableName", tableName).toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetModelVersionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetModelVersionRequest.java index 776b47df5..2d8466952 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetModelVersionRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetModelVersionRequest.java @@ -3,6 +3,7 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; import java.util.Objects; @@ -12,6 +13,13 @@ public class GetModelVersionRequest { /** The three-level (fully qualified) name of the model version */ private String fullName; + /** + * Whether to include model versions in the response for which the principal can only access + * selective metadata for + */ + 
@QueryParam("include_browse") + private Boolean includeBrowse; + /** The integer version number of the model version */ private Long version; @@ -24,6 +32,15 @@ public String getFullName() { return fullName; } + public GetModelVersionRequest setIncludeBrowse(Boolean includeBrowse) { + this.includeBrowse = includeBrowse; + return this; + } + + public Boolean getIncludeBrowse() { + return includeBrowse; + } + public GetModelVersionRequest setVersion(Long version) { this.version = version; return this; @@ -38,18 +55,21 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; GetModelVersionRequest that = (GetModelVersionRequest) o; - return Objects.equals(fullName, that.fullName) && Objects.equals(version, that.version); + return Objects.equals(fullName, that.fullName) + && Objects.equals(includeBrowse, that.includeBrowse) + && Objects.equals(version, that.version); } @Override public int hashCode() { - return Objects.hash(fullName, version); + return Objects.hash(fullName, includeBrowse, version); } @Override public String toString() { return new ToStringer(GetModelVersionRequest.class) .add("fullName", fullName) + .add("includeBrowse", includeBrowse) .add("version", version) .toString(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetRefreshRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetRefreshRequest.java index 1acf7ecd4..e94088dc8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetRefreshRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetRefreshRequest.java @@ -9,20 +9,11 @@ /** Get refresh */ @Generated public class GetRefreshRequest { - /** Full name of the table. */ - private String fullName; - /** ID of the refresh. 
*/ private String refreshId; - public GetRefreshRequest setFullName(String fullName) { - this.fullName = fullName; - return this; - } - - public String getFullName() { - return fullName; - } + /** Full name of the table. */ + private String tableName; public GetRefreshRequest setRefreshId(String refreshId) { this.refreshId = refreshId; @@ -33,24 +24,33 @@ public String getRefreshId() { return refreshId; } + public GetRefreshRequest setTableName(String tableName) { + this.tableName = tableName; + return this; + } + + public String getTableName() { + return tableName; + } + @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; GetRefreshRequest that = (GetRefreshRequest) o; - return Objects.equals(fullName, that.fullName) && Objects.equals(refreshId, that.refreshId); + return Objects.equals(refreshId, that.refreshId) && Objects.equals(tableName, that.tableName); } @Override public int hashCode() { - return Objects.hash(fullName, refreshId); + return Objects.hash(refreshId, tableName); } @Override public String toString() { return new ToStringer(GetRefreshRequest.class) - .add("fullName", fullName) .add("refreshId", refreshId) + .add("tableName", tableName) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetRegisteredModelRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetRegisteredModelRequest.java index 0913ad906..764dda115 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetRegisteredModelRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetRegisteredModelRequest.java @@ -3,6 +3,7 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; import java.util.Objects; @@ -12,6 +13,13 @@ public class 
GetRegisteredModelRequest { /** The three-level (fully qualified) name of the registered model */ private String fullName; + /** + * Whether to include registered models in the response for which the principal can only access + * selective metadata for + */ + @QueryParam("include_browse") + private Boolean includeBrowse; + public GetRegisteredModelRequest setFullName(String fullName) { this.fullName = fullName; return this; @@ -21,21 +29,34 @@ public String getFullName() { return fullName; } + public GetRegisteredModelRequest setIncludeBrowse(Boolean includeBrowse) { + this.includeBrowse = includeBrowse; + return this; + } + + public Boolean getIncludeBrowse() { + return includeBrowse; + } + @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; GetRegisteredModelRequest that = (GetRegisteredModelRequest) o; - return Objects.equals(fullName, that.fullName); + return Objects.equals(fullName, that.fullName) + && Objects.equals(includeBrowse, that.includeBrowse); } @Override public int hashCode() { - return Objects.hash(fullName); + return Objects.hash(fullName, includeBrowse); } @Override public String toString() { - return new ToStringer(GetRegisteredModelRequest.class).add("fullName", fullName).toString(); + return new ToStringer(GetRegisteredModelRequest.class) + .add("fullName", fullName) + .add("includeBrowse", includeBrowse) + .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetSchemaRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetSchemaRequest.java index 23e5b7b80..4d79abe5f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetSchemaRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetSchemaRequest.java @@ -3,6 +3,7 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.support.Generated; +import 
com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; import java.util.Objects; @@ -12,6 +13,13 @@ public class GetSchemaRequest { /** Full name of the schema. */ private String fullName; + /** + * Whether to include schemas in the response for which the principal can only access selective + * metadata for + */ + @QueryParam("include_browse") + private Boolean includeBrowse; + public GetSchemaRequest setFullName(String fullName) { this.fullName = fullName; return this; @@ -21,21 +29,34 @@ public String getFullName() { return fullName; } + public GetSchemaRequest setIncludeBrowse(Boolean includeBrowse) { + this.includeBrowse = includeBrowse; + return this; + } + + public Boolean getIncludeBrowse() { + return includeBrowse; + } + @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; GetSchemaRequest that = (GetSchemaRequest) o; - return Objects.equals(fullName, that.fullName); + return Objects.equals(fullName, that.fullName) + && Objects.equals(includeBrowse, that.includeBrowse); } @Override public int hashCode() { - return Objects.hash(fullName); + return Objects.hash(fullName, includeBrowse); } @Override public String toString() { - return new ToStringer(GetSchemaRequest.class).add("fullName", fullName).toString(); + return new ToStringer(GetSchemaRequest.class) + .add("fullName", fullName) + .add("includeBrowse", includeBrowse) + .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetTableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetTableRequest.java index cea52f229..4658fa3d3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetTableRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetTableRequest.java @@ -13,6 +13,13 @@ public class GetTableRequest { /** Full name of the table. 
*/ private String fullName; + /** + * Whether to include tables in the response for which the principal can only access selective + * metadata for + */ + @QueryParam("include_browse") + private Boolean includeBrowse; + /** Whether delta metadata should be included in the response. */ @QueryParam("include_delta_metadata") private Boolean includeDeltaMetadata; @@ -26,6 +33,15 @@ public String getFullName() { return fullName; } + public GetTableRequest setIncludeBrowse(Boolean includeBrowse) { + this.includeBrowse = includeBrowse; + return this; + } + + public Boolean getIncludeBrowse() { + return includeBrowse; + } + public GetTableRequest setIncludeDeltaMetadata(Boolean includeDeltaMetadata) { this.includeDeltaMetadata = includeDeltaMetadata; return this; @@ -41,18 +57,20 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; GetTableRequest that = (GetTableRequest) o; return Objects.equals(fullName, that.fullName) + && Objects.equals(includeBrowse, that.includeBrowse) && Objects.equals(includeDeltaMetadata, that.includeDeltaMetadata); } @Override public int hashCode() { - return Objects.hash(fullName, includeDeltaMetadata); + return Objects.hash(fullName, includeBrowse, includeDeltaMetadata); } @Override public String toString() { return new ToStringer(GetTableRequest.class) .add("fullName", fullName) + .add("includeBrowse", includeBrowse) .add("includeDeltaMetadata", includeDeltaMetadata) .toString(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/LakehouseMonitorsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/LakehouseMonitorsAPI.java index 13a834477..f0f36d9e4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/LakehouseMonitorsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/LakehouseMonitorsAPI.java @@ -31,8 +31,8 @@ public LakehouseMonitorsAPI(LakehouseMonitorsService mock) { impl = 
mock; } - public void cancelRefresh(String fullName, String refreshId) { - cancelRefresh(new CancelRefreshRequest().setFullName(fullName).setRefreshId(refreshId)); + public void cancelRefresh(String tableName, String refreshId) { + cancelRefresh(new CancelRefreshRequest().setTableName(tableName).setRefreshId(refreshId)); } /** @@ -51,10 +51,10 @@ public void cancelRefresh(CancelRefreshRequest request) { impl.cancelRefresh(request); } - public MonitorInfo create(String fullName, String assetsDir, String outputSchemaName) { + public MonitorInfo create(String tableName, String assetsDir, String outputSchemaName) { return create( new CreateMonitor() - .setFullName(fullName) + .setTableName(tableName) .setAssetsDir(assetsDir) .setOutputSchemaName(outputSchemaName)); } @@ -77,8 +77,8 @@ public MonitorInfo create(CreateMonitor request) { return impl.create(request); } - public void delete(String fullName) { - delete(new DeleteLakehouseMonitorRequest().setFullName(fullName)); + public void delete(String tableName) { + delete(new DeleteLakehouseMonitorRequest().setTableName(tableName)); } /** @@ -100,8 +100,8 @@ public void delete(DeleteLakehouseMonitorRequest request) { impl.delete(request); } - public MonitorInfo get(String fullName) { - return get(new GetLakehouseMonitorRequest().setFullName(fullName)); + public MonitorInfo get(String tableName) { + return get(new GetLakehouseMonitorRequest().setTableName(tableName)); } /** @@ -122,8 +122,8 @@ public MonitorInfo get(GetLakehouseMonitorRequest request) { return impl.get(request); } - public MonitorRefreshInfo getRefresh(String fullName, String refreshId) { - return getRefresh(new GetRefreshRequest().setFullName(fullName).setRefreshId(refreshId)); + public MonitorRefreshInfo getRefresh(String tableName, String refreshId) { + return getRefresh(new GetRefreshRequest().setTableName(tableName).setRefreshId(refreshId)); } /** @@ -142,8 +142,8 @@ public MonitorRefreshInfo getRefresh(GetRefreshRequest request) { return 
impl.getRefresh(request); } - public Iterable listRefreshes(String fullName) { - return listRefreshes(new ListRefreshesRequest().setFullName(fullName)); + public Iterable listRefreshes(String tableName) { + return listRefreshes(new ListRefreshesRequest().setTableName(tableName)); } /** @@ -162,8 +162,8 @@ public Iterable listRefreshes(ListRefreshesRequest request) return impl.listRefreshes(request); } - public MonitorRefreshInfo runRefresh(String fullName) { - return runRefresh(new RunRefreshRequest().setFullName(fullName)); + public MonitorRefreshInfo runRefresh(String tableName) { + return runRefresh(new RunRefreshRequest().setTableName(tableName)); } /** @@ -183,8 +183,9 @@ public MonitorRefreshInfo runRefresh(RunRefreshRequest request) { return impl.runRefresh(request); } - public MonitorInfo update(String fullName, String outputSchemaName) { - return update(new UpdateMonitor().setFullName(fullName).setOutputSchemaName(outputSchemaName)); + public MonitorInfo update(String tableName, String outputSchemaName) { + return update( + new UpdateMonitor().setTableName(tableName).setOutputSchemaName(outputSchemaName)); } /** diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/LakehouseMonitorsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/LakehouseMonitorsImpl.java index cb89939b6..bfc51e5f3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/LakehouseMonitorsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/LakehouseMonitorsImpl.java @@ -21,14 +21,14 @@ public void cancelRefresh(CancelRefreshRequest request) { String path = String.format( "/api/2.1/unity-catalog/tables/%s/monitor/refreshes/%s/cancel", - request.getFullName(), request.getRefreshId()); + request.getTableName(), request.getRefreshId()); Map headers = new HashMap<>(); apiClient.POST(path, null, CancelRefreshResponse.class, headers); } @Override public MonitorInfo 
create(CreateMonitor request) { - String path = String.format("/api/2.1/unity-catalog/tables/%s/monitor", request.getFullName()); + String path = String.format("/api/2.1/unity-catalog/tables/%s/monitor", request.getTableName()); Map headers = new HashMap<>(); headers.put("Accept", "application/json"); headers.put("Content-Type", "application/json"); @@ -37,14 +37,14 @@ public MonitorInfo create(CreateMonitor request) { @Override public void delete(DeleteLakehouseMonitorRequest request) { - String path = String.format("/api/2.1/unity-catalog/tables/%s/monitor", request.getFullName()); + String path = String.format("/api/2.1/unity-catalog/tables/%s/monitor", request.getTableName()); Map headers = new HashMap<>(); apiClient.DELETE(path, request, DeleteResponse.class, headers); } @Override public MonitorInfo get(GetLakehouseMonitorRequest request) { - String path = String.format("/api/2.1/unity-catalog/tables/%s/monitor", request.getFullName()); + String path = String.format("/api/2.1/unity-catalog/tables/%s/monitor", request.getTableName()); Map headers = new HashMap<>(); headers.put("Accept", "application/json"); return apiClient.GET(path, request, MonitorInfo.class, headers); @@ -55,7 +55,7 @@ public MonitorRefreshInfo getRefresh(GetRefreshRequest request) { String path = String.format( "/api/2.1/unity-catalog/tables/%s/monitor/refreshes/%s", - request.getFullName(), request.getRefreshId()); + request.getTableName(), request.getRefreshId()); Map headers = new HashMap<>(); headers.put("Accept", "application/json"); return apiClient.GET(path, request, MonitorRefreshInfo.class, headers); @@ -64,7 +64,7 @@ public MonitorRefreshInfo getRefresh(GetRefreshRequest request) { @Override public Collection listRefreshes(ListRefreshesRequest request) { String path = - String.format("/api/2.1/unity-catalog/tables/%s/monitor/refreshes", request.getFullName()); + String.format("/api/2.1/unity-catalog/tables/%s/monitor/refreshes", request.getTableName()); Map headers = new 
HashMap<>(); headers.put("Accept", "application/json"); return apiClient.getCollection(path, null, MonitorRefreshInfo.class, headers); @@ -73,7 +73,7 @@ public Collection listRefreshes(ListRefreshesRequest request @Override public MonitorRefreshInfo runRefresh(RunRefreshRequest request) { String path = - String.format("/api/2.1/unity-catalog/tables/%s/monitor/refreshes", request.getFullName()); + String.format("/api/2.1/unity-catalog/tables/%s/monitor/refreshes", request.getTableName()); Map headers = new HashMap<>(); headers.put("Accept", "application/json"); return apiClient.POST(path, null, MonitorRefreshInfo.class, headers); @@ -81,7 +81,7 @@ public MonitorRefreshInfo runRefresh(RunRefreshRequest request) { @Override public MonitorInfo update(UpdateMonitor request) { - String path = String.format("/api/2.1/unity-catalog/tables/%s/monitor", request.getFullName()); + String path = String.format("/api/2.1/unity-catalog/tables/%s/monitor", request.getTableName()); Map headers = new HashMap<>(); headers.put("Accept", "application/json"); headers.put("Content-Type", "application/json"); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCatalogsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCatalogsRequest.java new file mode 100755 index 000000000..f61ff29f3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListCatalogsRequest.java @@ -0,0 +1,46 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +/** List catalogs */ +@Generated +public class ListCatalogsRequest { + /** + * Whether to include catalogs in the response for which the principal can only access selective + * metadata for + */ + @QueryParam("include_browse") + private Boolean includeBrowse; + + public ListCatalogsRequest setIncludeBrowse(Boolean includeBrowse) { + this.includeBrowse = includeBrowse; + return this; + } + + public Boolean getIncludeBrowse() { + return includeBrowse; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListCatalogsRequest that = (ListCatalogsRequest) o; + return Objects.equals(includeBrowse, that.includeBrowse); + } + + @Override + public int hashCode() { + return Objects.hash(includeBrowse); + } + + @Override + public String toString() { + return new ToStringer(ListCatalogsRequest.class).add("includeBrowse", includeBrowse).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsRequest.java index 74ef5cbf3..a8a0e3729 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListExternalLocationsRequest.java @@ -10,6 +10,13 @@ /** List external locations */ @Generated public class ListExternalLocationsRequest { + /** + * Whether to include external locations in the response for which the principal can only access + * selective metadata for + */ + @QueryParam("include_browse") + private Boolean includeBrowse; + /** * Maximum number of external locations to 
return. If not set, all the external locations are * returned (not recommended). - when set to a value greater than 0, the page length is the @@ -24,6 +31,15 @@ public class ListExternalLocationsRequest { @QueryParam("page_token") private String pageToken; + public ListExternalLocationsRequest setIncludeBrowse(Boolean includeBrowse) { + this.includeBrowse = includeBrowse; + return this; + } + + public Boolean getIncludeBrowse() { + return includeBrowse; + } + public ListExternalLocationsRequest setMaxResults(Long maxResults) { this.maxResults = maxResults; return this; @@ -47,17 +63,20 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; ListExternalLocationsRequest that = (ListExternalLocationsRequest) o; - return Objects.equals(maxResults, that.maxResults) && Objects.equals(pageToken, that.pageToken); + return Objects.equals(includeBrowse, that.includeBrowse) + && Objects.equals(maxResults, that.maxResults) + && Objects.equals(pageToken, that.pageToken); } @Override public int hashCode() { - return Objects.hash(maxResults, pageToken); + return Objects.hash(includeBrowse, maxResults, pageToken); } @Override public String toString() { return new ToStringer(ListExternalLocationsRequest.class) + .add("includeBrowse", includeBrowse) .add("maxResults", maxResults) .add("pageToken", pageToken) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListFunctionsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListFunctionsRequest.java index 7b430e1a0..c33c3be8d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListFunctionsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListFunctionsRequest.java @@ -14,6 +14,13 @@ public class ListFunctionsRequest { @QueryParam("catalog_name") private String catalogName; + /** + * Whether to include functions in the 
response for which the principal can only access selective + * metadata for + */ + @QueryParam("include_browse") + private Boolean includeBrowse; + /** * Maximum number of functions to return. If not set, all the functions are returned (not * recommended). - when set to a value greater than 0, the page length is the minimum of this @@ -41,6 +48,15 @@ public String getCatalogName() { return catalogName; } + public ListFunctionsRequest setIncludeBrowse(Boolean includeBrowse) { + this.includeBrowse = includeBrowse; + return this; + } + + public Boolean getIncludeBrowse() { + return includeBrowse; + } + public ListFunctionsRequest setMaxResults(Long maxResults) { this.maxResults = maxResults; return this; @@ -74,6 +90,7 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; ListFunctionsRequest that = (ListFunctionsRequest) o; return Objects.equals(catalogName, that.catalogName) + && Objects.equals(includeBrowse, that.includeBrowse) && Objects.equals(maxResults, that.maxResults) && Objects.equals(pageToken, that.pageToken) && Objects.equals(schemaName, that.schemaName); @@ -81,13 +98,14 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(catalogName, maxResults, pageToken, schemaName); + return Objects.hash(catalogName, includeBrowse, maxResults, pageToken, schemaName); } @Override public String toString() { return new ToStringer(ListFunctionsRequest.class) .add("catalogName", catalogName) + .add("includeBrowse", includeBrowse) .add("maxResults", maxResults) .add("pageToken", pageToken) .add("schemaName", schemaName) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListModelVersionsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListModelVersionsRequest.java index 74945e49d..2df9e27c7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListModelVersionsRequest.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListModelVersionsRequest.java @@ -13,6 +13,13 @@ public class ListModelVersionsRequest { /** The full three-level name of the registered model under which to list model versions */ private String fullName; + /** + * Whether to include model versions in the response for which the principal can only access + * selective metadata for + */ + @QueryParam("include_browse") + private Boolean includeBrowse; + /** * Maximum number of model versions to return. If not set, the page length is set to a server * configured value (100, as of 1/3/2024). - when set to a value greater than 0, the page length @@ -36,6 +43,15 @@ public String getFullName() { return fullName; } + public ListModelVersionsRequest setIncludeBrowse(Boolean includeBrowse) { + this.includeBrowse = includeBrowse; + return this; + } + + public Boolean getIncludeBrowse() { + return includeBrowse; + } + public ListModelVersionsRequest setMaxResults(Long maxResults) { this.maxResults = maxResults; return this; @@ -60,19 +76,21 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; ListModelVersionsRequest that = (ListModelVersionsRequest) o; return Objects.equals(fullName, that.fullName) + && Objects.equals(includeBrowse, that.includeBrowse) && Objects.equals(maxResults, that.maxResults) && Objects.equals(pageToken, that.pageToken); } @Override public int hashCode() { - return Objects.hash(fullName, maxResults, pageToken); + return Objects.hash(fullName, includeBrowse, maxResults, pageToken); } @Override public String toString() { return new ToStringer(ListModelVersionsRequest.class) .add("fullName", fullName) + .add("includeBrowse", includeBrowse) .add("maxResults", maxResults) .add("pageToken", pageToken) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListRefreshesRequest.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListRefreshesRequest.java index 3b3f9433e..4edc8061d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListRefreshesRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListRefreshesRequest.java @@ -10,15 +10,15 @@ @Generated public class ListRefreshesRequest { /** Full name of the table. */ - private String fullName; + private String tableName; - public ListRefreshesRequest setFullName(String fullName) { - this.fullName = fullName; + public ListRefreshesRequest setTableName(String tableName) { + this.tableName = tableName; return this; } - public String getFullName() { - return fullName; + public String getTableName() { + return tableName; } @Override @@ -26,16 +26,16 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; ListRefreshesRequest that = (ListRefreshesRequest) o; - return Objects.equals(fullName, that.fullName); + return Objects.equals(tableName, that.tableName); } @Override public int hashCode() { - return Objects.hash(fullName); + return Objects.hash(tableName); } @Override public String toString() { - return new ToStringer(ListRefreshesRequest.class).add("fullName", fullName).toString(); + return new ToStringer(ListRefreshesRequest.class).add("tableName", tableName).toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListRegisteredModelsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListRegisteredModelsRequest.java index 9f99fb3c4..3cc8fe9f4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListRegisteredModelsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListRegisteredModelsRequest.java @@ -18,9 +18,27 @@ public class ListRegisteredModelsRequest { private String catalogName; /** - * Max 
number of registered models to return. If catalog and schema are unspecified, max_results - * must be specified. If max_results is unspecified, we return all results, starting from the page - * specified by page_token. + * Whether to include registered models in the response for which the principal can only access + * selective metadata for + */ + @QueryParam("include_browse") + private Boolean includeBrowse; + + /** + * Max number of registered models to return. + * + *

If both catalog and schema are specified: - when max_results is not specified, the page + * length is set to a server configured value (10000, as of 4/2/2024). - when set to a value + * greater than 0, the page length is the minimum of this value and a server configured value + * (10000, as of 4/2/2024); - when set to 0, the page length is set to a server configured value + * (10000, as of 4/2/2024); - when set to a value less than 0, an invalid parameter error is + * returned; + * + *

If neither schema nor catalog is specified: - when max_results is not specified, the page + * length is set to a server configured value (100, as of 4/2/2024). - when set to a value greater + * than 0, the page length is the minimum of this value and a server configured value (1000, as of + * 4/2/2024); - when set to 0, the page length is set to a server configured value (100, as of + * 4/2/2024); - when set to a value less than 0, an invalid parameter error is returned; */ @QueryParam("max_results") private Long maxResults; @@ -45,6 +63,15 @@ public String getCatalogName() { return catalogName; } + public ListRegisteredModelsRequest setIncludeBrowse(Boolean includeBrowse) { + this.includeBrowse = includeBrowse; + return this; + } + + public Boolean getIncludeBrowse() { + return includeBrowse; + } + public ListRegisteredModelsRequest setMaxResults(Long maxResults) { this.maxResults = maxResults; return this; @@ -78,6 +105,7 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; ListRegisteredModelsRequest that = (ListRegisteredModelsRequest) o; return Objects.equals(catalogName, that.catalogName) + && Objects.equals(includeBrowse, that.includeBrowse) && Objects.equals(maxResults, that.maxResults) && Objects.equals(pageToken, that.pageToken) && Objects.equals(schemaName, that.schemaName); @@ -85,13 +113,14 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(catalogName, maxResults, pageToken, schemaName); + return Objects.hash(catalogName, includeBrowse, maxResults, pageToken, schemaName); } @Override public String toString() { return new ToStringer(ListRegisteredModelsRequest.class) .add("catalogName", catalogName) + .add("includeBrowse", includeBrowse) .add("maxResults", maxResults) .add("pageToken", pageToken) .add("schemaName", schemaName) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSchemasRequest.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSchemasRequest.java index acae1f8b1..43cdd3816 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSchemasRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSchemasRequest.java @@ -14,6 +14,13 @@ public class ListSchemasRequest { @QueryParam("catalog_name") private String catalogName; + /** + * Whether to include schemas in the response for which the principal can only access selective + * metadata for + */ + @QueryParam("include_browse") + private Boolean includeBrowse; + /** * Maximum number of schemas to return. If not set, all the schemas are returned (not * recommended). - when set to a value greater than 0, the page length is the minimum of this @@ -37,6 +44,15 @@ public String getCatalogName() { return catalogName; } + public ListSchemasRequest setIncludeBrowse(Boolean includeBrowse) { + this.includeBrowse = includeBrowse; + return this; + } + + public Boolean getIncludeBrowse() { + return includeBrowse; + } + public ListSchemasRequest setMaxResults(Long maxResults) { this.maxResults = maxResults; return this; @@ -61,19 +77,21 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; ListSchemasRequest that = (ListSchemasRequest) o; return Objects.equals(catalogName, that.catalogName) + && Objects.equals(includeBrowse, that.includeBrowse) && Objects.equals(maxResults, that.maxResults) && Objects.equals(pageToken, that.pageToken); } @Override public int hashCode() { - return Objects.hash(catalogName, maxResults, pageToken); + return Objects.hash(catalogName, includeBrowse, maxResults, pageToken); } @Override public String toString() { return new ToStringer(ListSchemasRequest.class) .add("catalogName", catalogName) + .add("includeBrowse", includeBrowse) .add("maxResults", maxResults) .add("pageToken", pageToken) .toString(); diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTablesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTablesRequest.java index 7ec16a4bf..f0667e5c5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTablesRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTablesRequest.java @@ -14,6 +14,13 @@ public class ListTablesRequest { @QueryParam("catalog_name") private String catalogName; + /** + * Whether to include tables in the response for which the principal can only access selective + * metadata for + */ + @QueryParam("include_browse") + private Boolean includeBrowse; + /** Whether delta metadata should be included in the response. */ @QueryParam("include_delta_metadata") private Boolean includeDeltaMetadata; @@ -52,6 +59,15 @@ public String getCatalogName() { return catalogName; } + public ListTablesRequest setIncludeBrowse(Boolean includeBrowse) { + this.includeBrowse = includeBrowse; + return this; + } + + public Boolean getIncludeBrowse() { + return includeBrowse; + } + public ListTablesRequest setIncludeDeltaMetadata(Boolean includeDeltaMetadata) { this.includeDeltaMetadata = includeDeltaMetadata; return this; @@ -112,6 +128,7 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; ListTablesRequest that = (ListTablesRequest) o; return Objects.equals(catalogName, that.catalogName) + && Objects.equals(includeBrowse, that.includeBrowse) && Objects.equals(includeDeltaMetadata, that.includeDeltaMetadata) && Objects.equals(maxResults, that.maxResults) && Objects.equals(omitColumns, that.omitColumns) @@ -124,6 +141,7 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash( catalogName, + includeBrowse, includeDeltaMetadata, maxResults, omitColumns, @@ -136,6 +154,7 @@ public int hashCode() { public String toString() { return new ToStringer(ListTablesRequest.class) 
.add("catalogName", catalogName) + .add("includeBrowse", includeBrowse) .add("includeDeltaMetadata", includeDeltaMetadata) .add("maxResults", maxResults) .add("omitColumns", omitColumns) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListVolumesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListVolumesRequest.java index 15c6596b3..0f9095bc1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListVolumesRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListVolumesRequest.java @@ -14,6 +14,13 @@ public class ListVolumesRequest { @QueryParam("catalog_name") private String catalogName; + /** + * Whether to include volumes in the response for which the principal can only access selective + * metadata for + */ + @QueryParam("include_browse") + private Boolean includeBrowse; + /** * Maximum number of volumes to return (page length). * @@ -50,6 +57,15 @@ public String getCatalogName() { return catalogName; } + public ListVolumesRequest setIncludeBrowse(Boolean includeBrowse) { + this.includeBrowse = includeBrowse; + return this; + } + + public Boolean getIncludeBrowse() { + return includeBrowse; + } + public ListVolumesRequest setMaxResults(Long maxResults) { this.maxResults = maxResults; return this; @@ -83,6 +99,7 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; ListVolumesRequest that = (ListVolumesRequest) o; return Objects.equals(catalogName, that.catalogName) + && Objects.equals(includeBrowse, that.includeBrowse) && Objects.equals(maxResults, that.maxResults) && Objects.equals(pageToken, that.pageToken) && Objects.equals(schemaName, that.schemaName); @@ -90,13 +107,14 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(catalogName, maxResults, pageToken, schemaName); + return Objects.hash(catalogName, includeBrowse, maxResults, pageToken, 
schemaName); } @Override public String toString() { return new ToStringer(ListVolumesRequest.class) .add("catalogName", catalogName) + .add("includeBrowse", includeBrowse) .add("maxResults", maxResults) .add("pageToken", pageToken) .add("schemaName", schemaName) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresAPI.java index 7e43d4347..c25f726bd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoresAPI.java @@ -3,6 +3,7 @@ import com.databricks.sdk.core.ApiClient; import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -114,7 +115,8 @@ public MetastoreInfo get(GetMetastoreRequest request) { * the array. */ public Iterable list() { - return impl.list().getMetastores(); + return new Paginator<>( + null, (Void v) -> impl.list(), ListMetastoresResponse::getMetastores, response -> null); } /** diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionInfo.java index a277049ab..87813f1c5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ModelVersionInfo.java @@ -9,6 +9,13 @@ @Generated public class ModelVersionInfo { + /** + * Indicates whether the principal is limited to retrieving metadata for the associated object + * through the BROWSE privilege when include_browse is enabled in the request. 
+ */ + @JsonProperty("browse_only") + private Boolean browseOnly; + /** The name of the catalog containing the model version */ @JsonProperty("catalog_name") private String catalogName; @@ -88,6 +95,15 @@ public class ModelVersionInfo { @JsonProperty("version") private Long version; + public ModelVersionInfo setBrowseOnly(Boolean browseOnly) { + this.browseOnly = browseOnly; + return this; + } + + public Boolean getBrowseOnly() { + return browseOnly; + } + public ModelVersionInfo setCatalogName(String catalogName) { this.catalogName = catalogName; return this; @@ -246,7 +262,8 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; ModelVersionInfo that = (ModelVersionInfo) o; - return Objects.equals(catalogName, that.catalogName) + return Objects.equals(browseOnly, that.browseOnly) + && Objects.equals(catalogName, that.catalogName) && Objects.equals(comment, that.comment) && Objects.equals(createdAt, that.createdAt) && Objects.equals(createdBy, that.createdBy) @@ -268,6 +285,7 @@ public boolean equals(Object o) { @Override public int hashCode() { return Objects.hash( + browseOnly, catalogName, comment, createdAt, @@ -290,6 +308,7 @@ public int hashCode() { @Override public String toString() { return new ToStringer(ModelVersionInfo.class) + .add("browseOnly", browseOnly) .add("catalogName", catalogName) .add("comment", comment) .add("createdAt", createdAt) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorCronSchedule.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorCronSchedule.java index 1c355bd46..c8135aa11 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorCronSchedule.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorCronSchedule.java @@ -9,15 +9,20 @@ @Generated public class MonitorCronSchedule { - /** Whether the schedule is paused or not */ + 
/** Read only field that indicates whether a schedule is paused or not. */ @JsonProperty("pause_status") private MonitorCronSchedulePauseStatus pauseStatus; - /** A cron expression using quartz syntax that describes the schedule for a job. */ + /** + * The expression that determines when to run the monitor. See [examples]. + * + *

[examples]: + * https://www.quartz-scheduler.org/documentation/quartz-2.3.0/tutorials/crontrigger.html + */ @JsonProperty("quartz_cron_expression") private String quartzCronExpression; - /** A Java timezone id. The schedule for a job will be resolved with respect to this timezone. */ + /** The timezone id (e.g., ``"PST"``) in which to evaluate the quartz expression. */ @JsonProperty("timezone_id") private String timezoneId; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorCronSchedulePauseStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorCronSchedulePauseStatus.java index 5b59f5385..742f75577 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorCronSchedulePauseStatus.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorCronSchedulePauseStatus.java @@ -4,7 +4,7 @@ import com.databricks.sdk.support.Generated; -/** Whether the schedule is paused or not */ +/** Read only field that indicates whether a schedule is paused or not. */ @Generated public enum MonitorCronSchedulePauseStatus { PAUSED, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorCustomMetricType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorCustomMetricType.java deleted file mode 100755 index 391515d79..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorCustomMetricType.java +++ /dev/null @@ -1,15 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.catalog; - -import com.databricks.sdk.support.Generated; - -/** The type of the custom metric. 
*/ -@Generated -public enum MonitorCustomMetricType { - CUSTOM_METRIC_TYPE_AGGREGATE, - CUSTOM_METRIC_TYPE_DERIVED, - CUSTOM_METRIC_TYPE_DRIFT, - MONITOR_STATUS_ERROR, - MONITOR_STATUS_FAILED, -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorDestinations.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorDestination.java similarity index 73% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorDestinations.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorDestination.java index 1354e0944..d34b42ae2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorDestinations.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorDestination.java @@ -9,12 +9,15 @@ import java.util.Objects; @Generated -public class MonitorDestinations { - /** The list of email addresses to send the notification to. */ +public class MonitorDestination { + /** + * The list of email addresses to send the notification to. A maximum of 5 email addresses is + * supported. 
+ */ @JsonProperty("email_addresses") private Collection emailAddresses; - public MonitorDestinations setEmailAddresses(Collection emailAddresses) { + public MonitorDestination setEmailAddresses(Collection emailAddresses) { this.emailAddresses = emailAddresses; return this; } @@ -27,7 +30,7 @@ public Collection getEmailAddresses() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - MonitorDestinations that = (MonitorDestinations) o; + MonitorDestination that = (MonitorDestination) o; return Objects.equals(emailAddresses, that.emailAddresses); } @@ -38,7 +41,7 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(MonitorDestinations.class) + return new ToStringer(MonitorDestination.class) .add("emailAddresses", emailAddresses) .toString(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInferenceLogProfileType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInferenceLog.java similarity index 56% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInferenceLogProfileType.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInferenceLog.java index 875aa0a3d..5ccc716a2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInferenceLogProfileType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInferenceLog.java @@ -9,38 +9,57 @@ import java.util.Objects; @Generated -public class MonitorInferenceLogProfileType { +public class MonitorInferenceLog { /** - * List of granularities to use when aggregating data into time windows based on their timestamp. + * Granularities for aggregating data into time windows based on their timestamp. 
Currently the + * following static granularities are supported: {``"5 minutes"``, ``"30 minutes"``, ``"1 hour"``, + * ``"1 day"``, ``" week(s)"``, ``"1 month"``, ``"1 year"``}. */ @JsonProperty("granularities") private Collection granularities; - /** Column of the model label. */ + /** Optional column that contains the ground truth for the prediction. */ @JsonProperty("label_col") private String labelCol; - /** Column of the model id or version. */ + /** + * Column that contains the id of the model generating the predictions. Metrics will be computed + * per model id by default, and also across all model ids. + */ @JsonProperty("model_id_col") private String modelIdCol; - /** Column of the model prediction. */ + /** Column that contains the output/prediction from the model. */ @JsonProperty("prediction_col") private String predictionCol; - /** Column of the model prediction probabilities. */ + /** + * Optional column that contains the prediction probabilities for each class in a classification + * problem type. The values in this column should be a map, mapping each class label to the + * prediction probability for a given sample. The map should be of PySpark MapType(). + */ @JsonProperty("prediction_proba_col") private String predictionProbaCol; - /** Problem type the model aims to solve. */ + /** + * Problem type the model aims to solve. Determines the type of model-quality metrics that will be + * computed. + */ @JsonProperty("problem_type") - private MonitorInferenceLogProfileTypeProblemType problemType; + private MonitorInferenceLogProblemType problemType; - /** Column of the timestamp of predictions. */ + /** + * Column that contains the timestamps of requests. The column must be one of the following: - A + * ``TimestampType`` column - A column whose values can be converted to timestamps through the + * pyspark ``to_timestamp`` [function]. + * + *

[function]: + * https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_timestamp.html + */ @JsonProperty("timestamp_col") private String timestampCol; - public MonitorInferenceLogProfileType setGranularities(Collection granularities) { + public MonitorInferenceLog setGranularities(Collection granularities) { this.granularities = granularities; return this; } @@ -49,7 +68,7 @@ public Collection getGranularities() { return granularities; } - public MonitorInferenceLogProfileType setLabelCol(String labelCol) { + public MonitorInferenceLog setLabelCol(String labelCol) { this.labelCol = labelCol; return this; } @@ -58,7 +77,7 @@ public String getLabelCol() { return labelCol; } - public MonitorInferenceLogProfileType setModelIdCol(String modelIdCol) { + public MonitorInferenceLog setModelIdCol(String modelIdCol) { this.modelIdCol = modelIdCol; return this; } @@ -67,7 +86,7 @@ public String getModelIdCol() { return modelIdCol; } - public MonitorInferenceLogProfileType setPredictionCol(String predictionCol) { + public MonitorInferenceLog setPredictionCol(String predictionCol) { this.predictionCol = predictionCol; return this; } @@ -76,7 +95,7 @@ public String getPredictionCol() { return predictionCol; } - public MonitorInferenceLogProfileType setPredictionProbaCol(String predictionProbaCol) { + public MonitorInferenceLog setPredictionProbaCol(String predictionProbaCol) { this.predictionProbaCol = predictionProbaCol; return this; } @@ -85,17 +104,16 @@ public String getPredictionProbaCol() { return predictionProbaCol; } - public MonitorInferenceLogProfileType setProblemType( - MonitorInferenceLogProfileTypeProblemType problemType) { + public MonitorInferenceLog setProblemType(MonitorInferenceLogProblemType problemType) { this.problemType = problemType; return this; } - public MonitorInferenceLogProfileTypeProblemType getProblemType() { + public MonitorInferenceLogProblemType getProblemType() { return problemType; } - public 
MonitorInferenceLogProfileType setTimestampCol(String timestampCol) { + public MonitorInferenceLog setTimestampCol(String timestampCol) { this.timestampCol = timestampCol; return this; } @@ -108,7 +126,7 @@ public String getTimestampCol() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - MonitorInferenceLogProfileType that = (MonitorInferenceLogProfileType) o; + MonitorInferenceLog that = (MonitorInferenceLog) o; return Objects.equals(granularities, that.granularities) && Objects.equals(labelCol, that.labelCol) && Objects.equals(modelIdCol, that.modelIdCol) @@ -132,7 +150,7 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(MonitorInferenceLogProfileType.class) + return new ToStringer(MonitorInferenceLog.class) .add("granularities", granularities) .add("labelCol", labelCol) .add("modelIdCol", modelIdCol) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInferenceLogProfileTypeProblemType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInferenceLogProblemType.java similarity index 59% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInferenceLogProfileTypeProblemType.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInferenceLogProblemType.java index 11a36da5e..81529bcbe 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInferenceLogProfileTypeProblemType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInferenceLogProblemType.java @@ -4,9 +4,12 @@ import com.databricks.sdk.support.Generated; -/** Problem type the model aims to solve. */ +/** + * Problem type the model aims to solve. Determines the type of model-quality metrics that will be + * computed. 
+ */ @Generated -public enum MonitorInferenceLogProfileTypeProblemType { +public enum MonitorInferenceLogProblemType { PROBLEM_TYPE_CLASSIFICATION, PROBLEM_TYPE_REGRESSION, } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInfo.java index cd8cfde76..aac4fa412 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorInfo.java @@ -27,9 +27,12 @@ public class MonitorInfo { * time windows). */ @JsonProperty("custom_metrics") - private Collection customMetrics; + private Collection customMetrics; - /** The ID of the generated dashboard. */ + /** + * Id of dashboard that visualizes the computed metrics. This can be empty if the monitor is in + * PENDING state. + */ @JsonProperty("dashboard_id") private String dashboardId; @@ -46,7 +49,7 @@ public class MonitorInfo { /** Configuration for monitoring inference logs. */ @JsonProperty("inference_log") - private MonitorInferenceLogProfileType inferenceLog; + private MonitorInferenceLog inferenceLog; /** The latest failure message of the monitor (if any). */ @JsonProperty("latest_monitor_failure_msg") @@ -58,7 +61,7 @@ public class MonitorInfo { /** The notification settings for the monitor. */ @JsonProperty("notifications") - private Collection notifications; + private MonitorNotifications notifications; /** Schema where output metric tables are created. */ @JsonProperty("output_schema_name") @@ -86,7 +89,7 @@ public class MonitorInfo { /** Configuration for monitoring snapshot tables. */ @JsonProperty("snapshot") - private MonitorSnapshotProfileType snapshot; + private MonitorSnapshot snapshot; /** The status of the monitor. */ @JsonProperty("status") @@ -100,7 +103,7 @@ public class MonitorInfo { /** Configuration for monitoring time series tables. 
*/ @JsonProperty("time_series") - private MonitorTimeSeriesProfileType timeSeries; + private MonitorTimeSeries timeSeries; public MonitorInfo setAssetsDir(String assetsDir) { this.assetsDir = assetsDir; @@ -120,12 +123,12 @@ public String getBaselineTableName() { return baselineTableName; } - public MonitorInfo setCustomMetrics(Collection customMetrics) { + public MonitorInfo setCustomMetrics(Collection customMetrics) { this.customMetrics = customMetrics; return this; } - public Collection getCustomMetrics() { + public Collection getCustomMetrics() { return customMetrics; } @@ -157,12 +160,12 @@ public String getDriftMetricsTableName() { return driftMetricsTableName; } - public MonitorInfo setInferenceLog(MonitorInferenceLogProfileType inferenceLog) { + public MonitorInfo setInferenceLog(MonitorInferenceLog inferenceLog) { this.inferenceLog = inferenceLog; return this; } - public MonitorInferenceLogProfileType getInferenceLog() { + public MonitorInferenceLog getInferenceLog() { return inferenceLog; } @@ -184,12 +187,12 @@ public String getMonitorVersion() { return monitorVersion; } - public MonitorInfo setNotifications(Collection notifications) { + public MonitorInfo setNotifications(MonitorNotifications notifications) { this.notifications = notifications; return this; } - public Collection getNotifications() { + public MonitorNotifications getNotifications() { return notifications; } @@ -229,12 +232,12 @@ public Collection getSlicingExprs() { return slicingExprs; } - public MonitorInfo setSnapshot(MonitorSnapshotProfileType snapshot) { + public MonitorInfo setSnapshot(MonitorSnapshot snapshot) { this.snapshot = snapshot; return this; } - public MonitorSnapshotProfileType getSnapshot() { + public MonitorSnapshot getSnapshot() { return snapshot; } @@ -256,12 +259,12 @@ public String getTableName() { return tableName; } - public MonitorInfo setTimeSeries(MonitorTimeSeriesProfileType timeSeries) { + public MonitorInfo setTimeSeries(MonitorTimeSeries timeSeries) { 
this.timeSeries = timeSeries; return this; } - public MonitorTimeSeriesProfileType getTimeSeries() { + public MonitorTimeSeries getTimeSeries() { return timeSeries; } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorCustomMetric.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorMetric.java similarity index 62% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorCustomMetric.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorMetric.java index 2167187ba..1f6cdd963 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorCustomMetric.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorMetric.java @@ -9,7 +9,7 @@ import java.util.Objects; @Generated -public class MonitorCustomMetric { +public class MonitorMetric { /** * Jinja template for a SQL expression that specifies how to compute the metric. See [create * metric definition]. @@ -20,11 +20,14 @@ public class MonitorCustomMetric { @JsonProperty("definition") private String definition; - /** Columns on the monitored table to apply the custom metrics to. */ + /** + * A list of column names in the input table the metric should be computed for. Can use + * ``":table"`` to indicate that the metric needs information from multiple columns. + */ @JsonProperty("input_columns") private Collection inputColumns; - /** Name of the custom metric. */ + /** Name of the metric in the output tables. */ @JsonProperty("name") private String name; @@ -32,11 +35,19 @@ public class MonitorCustomMetric { @JsonProperty("output_data_type") private String outputDataType; - /** The type of the custom metric. */ + /** + * Can only be one of ``"CUSTOM_METRIC_TYPE_AGGREGATE"``, ``"CUSTOM_METRIC_TYPE_DERIVED"``, or + * ``"CUSTOM_METRIC_TYPE_DRIFT"``. 
The ``"CUSTOM_METRIC_TYPE_AGGREGATE"`` and + * ``"CUSTOM_METRIC_TYPE_DERIVED"`` metrics are computed on a single table, whereas the + * ``"CUSTOM_METRIC_TYPE_DRIFT"`` compare metrics across baseline and input table, or across the + * two consecutive time windows. - CUSTOM_METRIC_TYPE_AGGREGATE: only depend on the existing + * columns in your table - CUSTOM_METRIC_TYPE_DERIVED: depend on previously computed aggregate + * metrics - CUSTOM_METRIC_TYPE_DRIFT: depend on previously computed aggregate or derived metrics + */ @JsonProperty("type") - private MonitorCustomMetricType typeValue; + private MonitorMetricType typeValue; - public MonitorCustomMetric setDefinition(String definition) { + public MonitorMetric setDefinition(String definition) { this.definition = definition; return this; } @@ -45,7 +56,7 @@ public String getDefinition() { return definition; } - public MonitorCustomMetric setInputColumns(Collection inputColumns) { + public MonitorMetric setInputColumns(Collection inputColumns) { this.inputColumns = inputColumns; return this; } @@ -54,7 +65,7 @@ public Collection getInputColumns() { return inputColumns; } - public MonitorCustomMetric setName(String name) { + public MonitorMetric setName(String name) { this.name = name; return this; } @@ -63,7 +74,7 @@ public String getName() { return name; } - public MonitorCustomMetric setOutputDataType(String outputDataType) { + public MonitorMetric setOutputDataType(String outputDataType) { this.outputDataType = outputDataType; return this; } @@ -72,12 +83,12 @@ public String getOutputDataType() { return outputDataType; } - public MonitorCustomMetric setType(MonitorCustomMetricType typeValue) { + public MonitorMetric setType(MonitorMetricType typeValue) { this.typeValue = typeValue; return this; } - public MonitorCustomMetricType getType() { + public MonitorMetricType getType() { return typeValue; } @@ -85,7 +96,7 @@ public MonitorCustomMetricType getType() { public boolean equals(Object o) { if (this == o) return 
true; if (o == null || getClass() != o.getClass()) return false; - MonitorCustomMetric that = (MonitorCustomMetric) o; + MonitorMetric that = (MonitorMetric) o; return Objects.equals(definition, that.definition) && Objects.equals(inputColumns, that.inputColumns) && Objects.equals(name, that.name) @@ -100,7 +111,7 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(MonitorCustomMetric.class) + return new ToStringer(MonitorMetric.class) .add("definition", definition) .add("inputColumns", inputColumns) .add("name", name) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorMetricType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorMetricType.java new file mode 100755 index 000000000..e5020fecf --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorMetricType.java @@ -0,0 +1,21 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; + +/** + * Can only be one of ``"CUSTOM_METRIC_TYPE_AGGREGATE"``, ``"CUSTOM_METRIC_TYPE_DERIVED"``, or + * ``"CUSTOM_METRIC_TYPE_DRIFT"``. The ``"CUSTOM_METRIC_TYPE_AGGREGATE"`` and + * ``"CUSTOM_METRIC_TYPE_DERIVED"`` metrics are computed on a single table, whereas the + * ``"CUSTOM_METRIC_TYPE_DRIFT"`` compare metrics across baseline and input table, or across the two + * consecutive time windows. 
- CUSTOM_METRIC_TYPE_AGGREGATE: only depend on the existing columns in + * your table - CUSTOM_METRIC_TYPE_DERIVED: depend on previously computed aggregate metrics - + * CUSTOM_METRIC_TYPE_DRIFT: depend on previously computed aggregate or derived metrics + */ +@Generated +public enum MonitorMetricType { + CUSTOM_METRIC_TYPE_AGGREGATE, + CUSTOM_METRIC_TYPE_DERIVED, + CUSTOM_METRIC_TYPE_DRIFT, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorNotifications.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorNotifications.java new file mode 100755 index 000000000..6586c8498 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorNotifications.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class MonitorNotifications { + /** Who to send notifications to on monitor failure. */ + @JsonProperty("on_failure") + private MonitorDestination onFailure; + + /** Who to send notifications to when new data classification tags are detected. 
*/ + @JsonProperty("on_new_classification_tag_detected") + private MonitorDestination onNewClassificationTagDetected; + + public MonitorNotifications setOnFailure(MonitorDestination onFailure) { + this.onFailure = onFailure; + return this; + } + + public MonitorDestination getOnFailure() { + return onFailure; + } + + public MonitorNotifications setOnNewClassificationTagDetected( + MonitorDestination onNewClassificationTagDetected) { + this.onNewClassificationTagDetected = onNewClassificationTagDetected; + return this; + } + + public MonitorDestination getOnNewClassificationTagDetected() { + return onNewClassificationTagDetected; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + MonitorNotifications that = (MonitorNotifications) o; + return Objects.equals(onFailure, that.onFailure) + && Objects.equals(onNewClassificationTagDetected, that.onNewClassificationTagDetected); + } + + @Override + public int hashCode() { + return Objects.hash(onFailure, onNewClassificationTagDetected); + } + + @Override + public String toString() { + return new ToStringer(MonitorNotifications.class) + .add("onFailure", onFailure) + .add("onNewClassificationTagDetected", onNewClassificationTagDetected) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorNotificationsConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorNotificationsConfig.java deleted file mode 100755 index 68666f166..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorNotificationsConfig.java +++ /dev/null @@ -1,42 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.catalog; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Objects; - -@Generated -public class MonitorNotificationsConfig { - /** Who to send notifications to on monitor failure. */ - @JsonProperty("on_failure") - private MonitorDestinations onFailure; - - public MonitorNotificationsConfig setOnFailure(MonitorDestinations onFailure) { - this.onFailure = onFailure; - return this; - } - - public MonitorDestinations getOnFailure() { - return onFailure; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - MonitorNotificationsConfig that = (MonitorNotificationsConfig) o; - return Objects.equals(onFailure, that.onFailure); - } - - @Override - public int hashCode() { - return Objects.hash(onFailure); - } - - @Override - public String toString() { - return new ToStringer(MonitorNotificationsConfig.class).add("onFailure", onFailure).toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorRefreshInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorRefreshInfo.java index 42174fe21..15094c0fb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorRefreshInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorRefreshInfo.java @@ -9,7 +9,7 @@ @Generated public class MonitorRefreshInfo { - /** The time at which the refresh ended, in epoch milliseconds. */ + /** Time at which refresh operation completed (milliseconds since 1/1/1970 UTC). */ @JsonProperty("end_time_ms") private Long endTimeMs; @@ -19,11 +19,11 @@ public class MonitorRefreshInfo { @JsonProperty("message") private String message; - /** The ID of the refresh. */ + /** Unique id of the refresh operation. 
*/ @JsonProperty("refresh_id") private Long refreshId; - /** The time at which the refresh started, in epoch milliseconds. */ + /** Time at which refresh operation was initiated (milliseconds since 1/1/1970 UTC). */ @JsonProperty("start_time_ms") private Long startTimeMs; @@ -31,6 +31,10 @@ public class MonitorRefreshInfo { @JsonProperty("state") private MonitorRefreshInfoState state; + /** The method by which the refresh was triggered. */ + @JsonProperty("trigger") + private MonitorRefreshInfoTrigger trigger; + public MonitorRefreshInfo setEndTimeMs(Long endTimeMs) { this.endTimeMs = endTimeMs; return this; @@ -76,6 +80,15 @@ public MonitorRefreshInfoState getState() { return state; } + public MonitorRefreshInfo setTrigger(MonitorRefreshInfoTrigger trigger) { + this.trigger = trigger; + return this; + } + + public MonitorRefreshInfoTrigger getTrigger() { + return trigger; + } + @Override public boolean equals(Object o) { if (this == o) return true; @@ -85,12 +98,13 @@ public boolean equals(Object o) { && Objects.equals(message, that.message) && Objects.equals(refreshId, that.refreshId) && Objects.equals(startTimeMs, that.startTimeMs) - && Objects.equals(state, that.state); + && Objects.equals(state, that.state) + && Objects.equals(trigger, that.trigger); } @Override public int hashCode() { - return Objects.hash(endTimeMs, message, refreshId, startTimeMs, state); + return Objects.hash(endTimeMs, message, refreshId, startTimeMs, state, trigger); } @Override @@ -101,6 +115,7 @@ public String toString() { .add("refreshId", refreshId) .add("startTimeMs", startTimeMs) .add("state", state) + .add("trigger", trigger) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorRefreshInfoTrigger.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorRefreshInfoTrigger.java new file mode 100755 index 000000000..906a1c881 --- /dev/null +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorRefreshInfoTrigger.java @@ -0,0 +1,12 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.catalog; + +import com.databricks.sdk.support.Generated; + +/** The method by which the refresh was triggered. */ +@Generated +public enum MonitorRefreshInfoTrigger { + MANUAL, + SCHEDULE, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorSnapshotProfileType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorSnapshot.java similarity index 82% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorSnapshotProfileType.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorSnapshot.java index bd2e8f0e0..c2c63dd78 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorSnapshotProfileType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorSnapshot.java @@ -7,7 +7,7 @@ import java.util.Objects; @Generated -public class MonitorSnapshotProfileType { +public class MonitorSnapshot { @Override public boolean equals(Object o) { @@ -23,6 +23,6 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(MonitorSnapshotProfileType.class).toString(); + return new ToStringer(MonitorSnapshot.class).toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorTimeSeriesProfileType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorTimeSeries.java similarity index 58% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorTimeSeriesProfileType.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorTimeSeries.java index ee757bd08..8ad8758fd 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorTimeSeriesProfileType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorTimeSeries.java @@ -9,21 +9,27 @@ import java.util.Objects; @Generated -public class MonitorTimeSeriesProfileType { +public class MonitorTimeSeries { /** - * List of granularities to use when aggregating data into time windows based on their timestamp. + * Granularities for aggregating data into time windows based on their timestamp. Currently the + * following static granularities are supported: {``"5 minutes"``, ``"30 minutes"``, ``"1 hour"``, + * ``"1 day"``, ``" week(s)"``, ``"1 month"``, ``"1 year"``}. */ @JsonProperty("granularities") private Collection granularities; /** - * The timestamp column. This must be timestamp types or convertible to timestamp types using the - * pyspark to_timestamp function. + * Column that contains the timestamps of requests. The column must be one of the following: - A + * ``TimestampType`` column - A column whose values can be converted to timestamps through the + * pyspark ``to_timestamp`` [function]. + * + *

[function]: + * https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_timestamp.html */ @JsonProperty("timestamp_col") private String timestampCol; - public MonitorTimeSeriesProfileType setGranularities(Collection granularities) { + public MonitorTimeSeries setGranularities(Collection granularities) { this.granularities = granularities; return this; } @@ -32,7 +38,7 @@ public Collection getGranularities() { return granularities; } - public MonitorTimeSeriesProfileType setTimestampCol(String timestampCol) { + public MonitorTimeSeries setTimestampCol(String timestampCol) { this.timestampCol = timestampCol; return this; } @@ -45,7 +51,7 @@ public String getTimestampCol() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - MonitorTimeSeriesProfileType that = (MonitorTimeSeriesProfileType) o; + MonitorTimeSeries that = (MonitorTimeSeries) o; return Objects.equals(granularities, that.granularities) && Objects.equals(timestampCol, that.timestampCol); } @@ -57,7 +63,7 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(MonitorTimeSeriesProfileType.class) + return new ToStringer(MonitorTimeSeries.class) .add("granularities", granularities) .add("timestampCol", timestampCol) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesAPI.java index 98495b2eb..32f99a526 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesAPI.java @@ -28,7 +28,7 @@ public OnlineTablesAPI(OnlineTablesService mock) { * *

Create a new Online Table. */ - public OnlineTable create(ViewData request) { + public OnlineTable create(CreateOnlineTableRequest request) { return impl.create(request); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesImpl.java index 3f300ec2c..3b29957f1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesImpl.java @@ -16,7 +16,7 @@ public OnlineTablesImpl(ApiClient apiClient) { } @Override - public OnlineTable create(ViewData request) { + public OnlineTable create(CreateOnlineTableRequest request) { String path = "/api/2.0/online-tables"; Map headers = new HashMap<>(); headers.put("Accept", "application/json"); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesService.java index 97bd017fa..e18d13cd0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTablesService.java @@ -17,7 +17,7 @@ public interface OnlineTablesService { * *

Create a new Online Table. */ - OnlineTable create(ViewData viewData); + OnlineTable create(CreateOnlineTableRequest createOnlineTableRequest); /** * Delete an Online Table. diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Privilege.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Privilege.java index d76db37fb..df485f25e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Privilege.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Privilege.java @@ -6,6 +6,7 @@ @Generated public enum Privilege { + ACCESS, ALL_PRIVILEGES, APPLY_TAG, CREATE, @@ -22,6 +23,7 @@ public enum Privilege { CREATE_PROVIDER, CREATE_RECIPIENT, CREATE_SCHEMA, + CREATE_SERVICE_CREDENTIAL, CREATE_SHARE, CREATE_STORAGE_CREDENTIAL, CREATE_TABLE, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ReadVolumeRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ReadVolumeRequest.java index cfc998d09..bf7bf0f60 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ReadVolumeRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ReadVolumeRequest.java @@ -3,15 +3,32 @@ package com.databricks.sdk.service.catalog; import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; import com.databricks.sdk.support.ToStringer; import java.util.Objects; /** Get a Volume */ @Generated public class ReadVolumeRequest { + /** + * Whether to include volumes in the response for which the principal can only access selective + * metadata for + */ + @QueryParam("include_browse") + private Boolean includeBrowse; + /** The three-level (fully qualified) name of the volume */ private String name; + public ReadVolumeRequest setIncludeBrowse(Boolean includeBrowse) { + this.includeBrowse = includeBrowse; + return this; + } + + public Boolean getIncludeBrowse() 
{ + return includeBrowse; + } + public ReadVolumeRequest setName(String name) { this.name = name; return this; @@ -26,16 +43,19 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; ReadVolumeRequest that = (ReadVolumeRequest) o; - return Objects.equals(name, that.name); + return Objects.equals(includeBrowse, that.includeBrowse) && Objects.equals(name, that.name); } @Override public int hashCode() { - return Objects.hash(name); + return Objects.hash(includeBrowse, name); } @Override public String toString() { - return new ToStringer(ReadVolumeRequest.class).add("name", name).toString(); + return new ToStringer(ReadVolumeRequest.class) + .add("includeBrowse", includeBrowse) + .add("name", name) + .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelInfo.java index 2437e9e84..072d58d13 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelInfo.java @@ -14,6 +14,13 @@ public class RegisteredModelInfo { @JsonProperty("aliases") private Collection aliases; + /** + * Indicates whether the principal is limited to retrieving metadata for the associated object + * through the BROWSE privilege when include_browse is enabled in the request. 
+ */ + @JsonProperty("browse_only") + private Boolean browseOnly; + /** The name of the catalog where the schema and the registered model reside */ @JsonProperty("catalog_name") private String catalogName; @@ -71,6 +78,15 @@ public Collection getAliases() { return aliases; } + public RegisteredModelInfo setBrowseOnly(Boolean browseOnly) { + this.browseOnly = browseOnly; + return this; + } + + public Boolean getBrowseOnly() { + return browseOnly; + } + public RegisteredModelInfo setCatalogName(String catalogName) { this.catalogName = catalogName; return this; @@ -185,6 +201,7 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; RegisteredModelInfo that = (RegisteredModelInfo) o; return Objects.equals(aliases, that.aliases) + && Objects.equals(browseOnly, that.browseOnly) && Objects.equals(catalogName, that.catalogName) && Objects.equals(comment, that.comment) && Objects.equals(createdAt, that.createdAt) @@ -203,6 +220,7 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash( aliases, + browseOnly, catalogName, comment, createdAt, @@ -221,6 +239,7 @@ public int hashCode() { public String toString() { return new ToStringer(RegisteredModelInfo.class) .add("aliases", aliases) + .add("browseOnly", browseOnly) .add("catalogName", catalogName) .add("comment", comment) .add("createdAt", createdAt) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RunRefreshRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RunRefreshRequest.java index 779d79d78..c39338c10 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RunRefreshRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RunRefreshRequest.java @@ -10,15 +10,15 @@ @Generated public class RunRefreshRequest { /** Full name of the table. 
*/ - private String fullName; + private String tableName; - public RunRefreshRequest setFullName(String fullName) { - this.fullName = fullName; + public RunRefreshRequest setTableName(String tableName) { + this.tableName = tableName; return this; } - public String getFullName() { - return fullName; + public String getTableName() { + return tableName; } @Override @@ -26,16 +26,16 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; RunRefreshRequest that = (RunRefreshRequest) o; - return Objects.equals(fullName, that.fullName); + return Objects.equals(tableName, that.tableName); } @Override public int hashCode() { - return Objects.hash(fullName); + return Objects.hash(tableName); } @Override public String toString() { - return new ToStringer(RunRefreshRequest.class).add("fullName", fullName).toString(); + return new ToStringer(RunRefreshRequest.class).add("tableName", tableName).toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemaInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemaInfo.java index 1d010259a..dc899512e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemaInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemaInfo.java @@ -10,6 +10,13 @@ @Generated public class SchemaInfo { + /** + * Indicates whether the principal is limited to retrieving metadata for the associated object + * through the BROWSE privilege when include_browse is enabled in the request. + */ + @JsonProperty("browse_only") + private Boolean browseOnly; + /** Name of parent catalog. */ @JsonProperty("catalog_name") private String catalogName; @@ -58,6 +65,10 @@ public class SchemaInfo { @JsonProperty("properties") private Map properties; + /** The unique identifier of the schema. 
*/ + @JsonProperty("schema_id") + private String schemaId; + /** Storage location for managed tables within schema. */ @JsonProperty("storage_location") private String storageLocation; @@ -74,6 +85,15 @@ public class SchemaInfo { @JsonProperty("updated_by") private String updatedBy; + public SchemaInfo setBrowseOnly(Boolean browseOnly) { + this.browseOnly = browseOnly; + return this; + } + + public Boolean getBrowseOnly() { + return browseOnly; + } + public SchemaInfo setCatalogName(String catalogName) { this.catalogName = catalogName; return this; @@ -184,6 +204,15 @@ public Map getProperties() { return properties; } + public SchemaInfo setSchemaId(String schemaId) { + this.schemaId = schemaId; + return this; + } + + public String getSchemaId() { + return schemaId; + } + public SchemaInfo setStorageLocation(String storageLocation) { this.storageLocation = storageLocation; return this; @@ -225,7 +254,8 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; SchemaInfo that = (SchemaInfo) o; - return Objects.equals(catalogName, that.catalogName) + return Objects.equals(browseOnly, that.browseOnly) + && Objects.equals(catalogName, that.catalogName) && Objects.equals(catalogType, that.catalogType) && Objects.equals(comment, that.comment) && Objects.equals(createdAt, that.createdAt) @@ -238,6 +268,7 @@ public boolean equals(Object o) { && Objects.equals(name, that.name) && Objects.equals(owner, that.owner) && Objects.equals(properties, that.properties) + && Objects.equals(schemaId, that.schemaId) && Objects.equals(storageLocation, that.storageLocation) && Objects.equals(storageRoot, that.storageRoot) && Objects.equals(updatedAt, that.updatedAt) @@ -247,6 +278,7 @@ public boolean equals(Object o) { @Override public int hashCode() { return Objects.hash( + browseOnly, catalogName, catalogType, comment, @@ -259,6 +291,7 @@ public int hashCode() { name, owner, properties, + schemaId, storageLocation, 
storageRoot, updatedAt, @@ -268,6 +301,7 @@ public int hashCode() { @Override public String toString() { return new ToStringer(SchemaInfo.class) + .add("browseOnly", browseOnly) .add("catalogName", catalogName) .add("catalogType", catalogType) .add("comment", comment) @@ -280,6 +314,7 @@ public String toString() { .add("name", name) .add("owner", owner) .add("properties", properties) + .add("schemaId", schemaId) .add("storageLocation", storageLocation) .add("storageRoot", storageRoot) .add("updatedAt", updatedAt) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasAPI.java index 64eb74554..441636890 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasAPI.java @@ -81,9 +81,8 @@ public Iterable list(String catalogName) { *

Gets an array of schemas for a catalog in the metastore. If the caller is the metastore * admin or the owner of the parent catalog, all schemas for the catalog will be retrieved. * Otherwise, only schemas owned by the caller (or for which the caller has the **USE_SCHEMA** - * privilege) will be retrieved. For unpaginated request, there is no guarantee of a specific - * ordering of the elements in the array. For paginated request, elements are ordered by their - * name. + * privilege) will be retrieved. There is no guarantee of a specific ordering of the elements in + * the array. */ public Iterable list(ListSchemasRequest request) { return new Paginator<>( diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasService.java index 68f384c70..e18efa0d8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasService.java @@ -45,9 +45,8 @@ public interface SchemasService { *

Gets an array of schemas for a catalog in the metastore. If the caller is the metastore * admin or the owner of the parent catalog, all schemas for the catalog will be retrieved. * Otherwise, only schemas owned by the caller (or for which the caller has the **USE_SCHEMA** - * privilege) will be retrieved. For unpaginated request, there is no guarantee of a specific - * ordering of the elements in the array. For paginated request, elements are ordered by their - * name. + * privilege) will be retrieved. There is no guarantee of a specific ordering of the elements in + * the array. */ ListSchemasResponse list(ListSchemasRequest listSchemasRequest); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialInfo.java index 2046f44ff..7a580ad73 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialInfo.java @@ -11,11 +11,11 @@ public class StorageCredentialInfo { /** The AWS IAM role configuration. */ @JsonProperty("aws_iam_role") - private AwsIamRole awsIamRole; + private AwsIamRoleResponse awsIamRole; /** The Azure managed identity configuration. */ @JsonProperty("azure_managed_identity") - private AzureManagedIdentity azureManagedIdentity; + private AzureManagedIdentityResponse azureManagedIdentity; /** The Azure service principal configuration. 
*/ @JsonProperty("azure_service_principal") @@ -73,21 +73,22 @@ public class StorageCredentialInfo { @JsonProperty("used_for_managed_storage") private Boolean usedForManagedStorage; - public StorageCredentialInfo setAwsIamRole(AwsIamRole awsIamRole) { + public StorageCredentialInfo setAwsIamRole(AwsIamRoleResponse awsIamRole) { this.awsIamRole = awsIamRole; return this; } - public AwsIamRole getAwsIamRole() { + public AwsIamRoleResponse getAwsIamRole() { return awsIamRole; } - public StorageCredentialInfo setAzureManagedIdentity(AzureManagedIdentity azureManagedIdentity) { + public StorageCredentialInfo setAzureManagedIdentity( + AzureManagedIdentityResponse azureManagedIdentity) { this.azureManagedIdentity = azureManagedIdentity; return this; } - public AzureManagedIdentity getAzureManagedIdentity() { + public AzureManagedIdentityResponse getAzureManagedIdentity() { return azureManagedIdentity; } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsAPI.java index 1c20bfd60..69d3b4f87 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsAPI.java @@ -82,9 +82,8 @@ public StorageCredentialInfo get(GetStorageCredentialRequest request) { * *

Gets an array of storage credentials (as __StorageCredentialInfo__ objects). The array is * limited to only those storage credentials the caller has permission to access. If the caller is - * a metastore admin, retrieval of credentials is unrestricted. For unpaginated request, there is - * no guarantee of a specific ordering of the elements in the array. For paginated request, - * elements are ordered by their name. + * a metastore admin, retrieval of credentials is unrestricted. There is no guarantee of a + * specific ordering of the elements in the array. */ public Iterable list(ListStorageCredentialsRequest request) { return new Paginator<>( diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsService.java index d6de7f68f..23af9af76 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialsService.java @@ -50,9 +50,8 @@ public interface StorageCredentialsService { * *

Gets an array of storage credentials (as __StorageCredentialInfo__ objects). The array is * limited to only those storage credentials the caller has permission to access. If the caller is - * a metastore admin, retrieval of credentials is unrestricted. For unpaginated request, there is - * no guarantee of a specific ordering of the elements in the array. For paginated request, - * elements are ordered by their name. + * a metastore admin, retrieval of credentials is unrestricted. There is no guarantee of a + * specific ordering of the elements in the array. */ ListStorageCredentialsResponse list(ListStorageCredentialsRequest listStorageCredentialsRequest); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasAPI.java index e28ccc54b..626f204e2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SystemSchemasAPI.java @@ -3,6 +3,7 @@ import com.databricks.sdk.core.ApiClient; import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -66,7 +67,8 @@ public Iterable list(String metastoreId) { * metastore admin. 
*/ public Iterable list(ListSystemSchemasRequest request) { - return impl.list(request).getSchemas(); + return new Paginator<>( + request, impl::list, ListSystemSchemasResponse::getSchemas, response -> null); } public SystemSchemasService impl() { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableInfo.java index f84d5ca58..7208948cb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableInfo.java @@ -15,6 +15,13 @@ public class TableInfo { @JsonProperty("access_point") private String accessPoint; + /** + * Indicates whether the principal is limited to retrieving metadata for the associated object + * through the BROWSE privilege when include_browse is enabled in the request. + */ + @JsonProperty("browse_only") + private Boolean browseOnly; + /** Name of parent catalog. */ @JsonProperty("catalog_name") private String catalogName; @@ -122,7 +129,7 @@ public class TableInfo { @JsonProperty("table_constraints") private Collection tableConstraints; - /** Name of table, relative to parent schema. */ + /** The unique identifier of the table. 
*/ @JsonProperty("table_id") private String tableId; @@ -163,6 +170,15 @@ public String getAccessPoint() { return accessPoint; } + public TableInfo setBrowseOnly(Boolean browseOnly) { + this.browseOnly = browseOnly; + return this; + } + + public Boolean getBrowseOnly() { + return browseOnly; + } + public TableInfo setCatalogName(String catalogName) { this.catalogName = catalogName; return this; @@ -442,6 +458,7 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; TableInfo that = (TableInfo) o; return Objects.equals(accessPoint, that.accessPoint) + && Objects.equals(browseOnly, that.browseOnly) && Objects.equals(catalogName, that.catalogName) && Objects.equals(columns, that.columns) && Objects.equals(comment, that.comment) @@ -479,6 +496,7 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash( accessPoint, + browseOnly, catalogName, columns, comment, @@ -515,6 +533,7 @@ public int hashCode() { public String toString() { return new ToStringer(TableInfo.class) .add("accessPoint", accessPoint) + .add("browseOnly", browseOnly) .add("catalogName", catalogName) .add("columns", columns) .add("comment", comment) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableRowFilter.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableRowFilter.java index 70347fa79..bd53b8d98 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableRowFilter.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableRowFilter.java @@ -10,6 +10,10 @@ @Generated public class TableRowFilter { + /** The full name of the row filter SQL UDF. */ + @JsonProperty("function_name") + private String functionName; + /** * The list of table columns to be passed as input to the row filter function. The column types * should match the types of the filter function arguments. 
@@ -17,26 +21,22 @@ public class TableRowFilter { @JsonProperty("input_column_names") private Collection inputColumnNames; - /** The full name of the row filter SQL UDF. */ - @JsonProperty("name") - private String name; - - public TableRowFilter setInputColumnNames(Collection inputColumnNames) { - this.inputColumnNames = inputColumnNames; + public TableRowFilter setFunctionName(String functionName) { + this.functionName = functionName; return this; } - public Collection getInputColumnNames() { - return inputColumnNames; + public String getFunctionName() { + return functionName; } - public TableRowFilter setName(String name) { - this.name = name; + public TableRowFilter setInputColumnNames(Collection inputColumnNames) { + this.inputColumnNames = inputColumnNames; return this; } - public String getName() { - return name; + public Collection getInputColumnNames() { + return inputColumnNames; } @Override @@ -44,20 +44,20 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; TableRowFilter that = (TableRowFilter) o; - return Objects.equals(inputColumnNames, that.inputColumnNames) - && Objects.equals(name, that.name); + return Objects.equals(functionName, that.functionName) + && Objects.equals(inputColumnNames, that.inputColumnNames); } @Override public int hashCode() { - return Objects.hash(inputColumnNames, name); + return Objects.hash(functionName, inputColumnNames); } @Override public String toString() { return new ToStringer(TableRowFilter.class) + .add("functionName", functionName) .add("inputColumnNames", inputColumnNames) - .add("name", name) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMonitor.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMonitor.java index b8e73eca9..9e9130b8c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMonitor.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateMonitor.java @@ -23,22 +23,19 @@ public class UpdateMonitor { * time windows). */ @JsonProperty("custom_metrics") - private Collection customMetrics; + private Collection customMetrics; /** The data classification config for the monitor. */ @JsonProperty("data_classification_config") private MonitorDataClassificationConfig dataClassificationConfig; - /** Full name of the table. */ - private String fullName; - /** Configuration for monitoring inference logs. */ @JsonProperty("inference_log") - private MonitorInferenceLogProfileType inferenceLog; + private MonitorInferenceLog inferenceLog; /** The notification settings for the monitor. */ @JsonProperty("notifications") - private Collection notifications; + private MonitorNotifications notifications; /** Schema where output metric tables are created. */ @JsonProperty("output_schema_name") @@ -59,11 +56,14 @@ public class UpdateMonitor { /** Configuration for monitoring snapshot tables. */ @JsonProperty("snapshot") - private MonitorSnapshotProfileType snapshot; + private MonitorSnapshot snapshot; + + /** Full name of the table. */ + private String tableName; /** Configuration for monitoring time series tables. 
*/ @JsonProperty("time_series") - private MonitorTimeSeriesProfileType timeSeries; + private MonitorTimeSeries timeSeries; public UpdateMonitor setBaselineTableName(String baselineTableName) { this.baselineTableName = baselineTableName; @@ -74,12 +74,12 @@ public String getBaselineTableName() { return baselineTableName; } - public UpdateMonitor setCustomMetrics(Collection customMetrics) { + public UpdateMonitor setCustomMetrics(Collection customMetrics) { this.customMetrics = customMetrics; return this; } - public Collection getCustomMetrics() { + public Collection getCustomMetrics() { return customMetrics; } @@ -93,30 +93,21 @@ public MonitorDataClassificationConfig getDataClassificationConfig() { return dataClassificationConfig; } - public UpdateMonitor setFullName(String fullName) { - this.fullName = fullName; - return this; - } - - public String getFullName() { - return fullName; - } - - public UpdateMonitor setInferenceLog(MonitorInferenceLogProfileType inferenceLog) { + public UpdateMonitor setInferenceLog(MonitorInferenceLog inferenceLog) { this.inferenceLog = inferenceLog; return this; } - public MonitorInferenceLogProfileType getInferenceLog() { + public MonitorInferenceLog getInferenceLog() { return inferenceLog; } - public UpdateMonitor setNotifications(Collection notifications) { + public UpdateMonitor setNotifications(MonitorNotifications notifications) { this.notifications = notifications; return this; } - public Collection getNotifications() { + public MonitorNotifications getNotifications() { return notifications; } @@ -147,21 +138,30 @@ public Collection getSlicingExprs() { return slicingExprs; } - public UpdateMonitor setSnapshot(MonitorSnapshotProfileType snapshot) { + public UpdateMonitor setSnapshot(MonitorSnapshot snapshot) { this.snapshot = snapshot; return this; } - public MonitorSnapshotProfileType getSnapshot() { + public MonitorSnapshot getSnapshot() { return snapshot; } - public UpdateMonitor setTimeSeries(MonitorTimeSeriesProfileType 
timeSeries) { + public UpdateMonitor setTableName(String tableName) { + this.tableName = tableName; + return this; + } + + public String getTableName() { + return tableName; + } + + public UpdateMonitor setTimeSeries(MonitorTimeSeries timeSeries) { this.timeSeries = timeSeries; return this; } - public MonitorTimeSeriesProfileType getTimeSeries() { + public MonitorTimeSeries getTimeSeries() { return timeSeries; } @@ -173,13 +173,13 @@ public boolean equals(Object o) { return Objects.equals(baselineTableName, that.baselineTableName) && Objects.equals(customMetrics, that.customMetrics) && Objects.equals(dataClassificationConfig, that.dataClassificationConfig) - && Objects.equals(fullName, that.fullName) && Objects.equals(inferenceLog, that.inferenceLog) && Objects.equals(notifications, that.notifications) && Objects.equals(outputSchemaName, that.outputSchemaName) && Objects.equals(schedule, that.schedule) && Objects.equals(slicingExprs, that.slicingExprs) && Objects.equals(snapshot, that.snapshot) + && Objects.equals(tableName, that.tableName) && Objects.equals(timeSeries, that.timeSeries); } @@ -189,13 +189,13 @@ public int hashCode() { baselineTableName, customMetrics, dataClassificationConfig, - fullName, inferenceLog, notifications, outputSchemaName, schedule, slicingExprs, snapshot, + tableName, timeSeries); } @@ -205,13 +205,13 @@ public String toString() { .add("baselineTableName", baselineTableName) .add("customMetrics", customMetrics) .add("dataClassificationConfig", dataClassificationConfig) - .add("fullName", fullName) .add("inferenceLog", inferenceLog) .add("notifications", notifications) .add("outputSchemaName", outputSchemaName) .add("schedule", schedule) .add("slicingExprs", slicingExprs) .add("snapshot", snapshot) + .add("tableName", tableName) .add("timeSeries", timeSeries) .toString(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateStorageCredential.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateStorageCredential.java index 873d551f5..29c98b451 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateStorageCredential.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateStorageCredential.java @@ -11,11 +11,11 @@ public class UpdateStorageCredential { /** The AWS IAM role configuration. */ @JsonProperty("aws_iam_role") - private AwsIamRole awsIamRole; + private AwsIamRoleRequest awsIamRole; /** The Azure managed identity configuration. */ @JsonProperty("azure_managed_identity") - private AzureManagedIdentity azureManagedIdentity; + private AzureManagedIdentityResponse azureManagedIdentity; /** The Azure service principal configuration. */ @JsonProperty("azure_service_principal") @@ -56,22 +56,22 @@ public class UpdateStorageCredential { @JsonProperty("skip_validation") private Boolean skipValidation; - public UpdateStorageCredential setAwsIamRole(AwsIamRole awsIamRole) { + public UpdateStorageCredential setAwsIamRole(AwsIamRoleRequest awsIamRole) { this.awsIamRole = awsIamRole; return this; } - public AwsIamRole getAwsIamRole() { + public AwsIamRoleRequest getAwsIamRole() { return awsIamRole; } public UpdateStorageCredential setAzureManagedIdentity( - AzureManagedIdentity azureManagedIdentity) { + AzureManagedIdentityResponse azureManagedIdentity) { this.azureManagedIdentity = azureManagedIdentity; return this; } - public AzureManagedIdentity getAzureManagedIdentity() { + public AzureManagedIdentityResponse getAzureManagedIdentity() { return azureManagedIdentity; } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateStorageCredential.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateStorageCredential.java index 405cdd428..23fb6866a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateStorageCredential.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidateStorageCredential.java @@ -11,11 +11,11 @@ public class ValidateStorageCredential { /** The AWS IAM role configuration. */ @JsonProperty("aws_iam_role") - private AwsIamRole awsIamRole; + private AwsIamRoleRequest awsIamRole; /** The Azure managed identity configuration. */ @JsonProperty("azure_managed_identity") - private AzureManagedIdentity azureManagedIdentity; + private AzureManagedIdentityRequest azureManagedIdentity; /** The Azure service principal configuration. */ @JsonProperty("azure_service_principal") @@ -45,22 +45,22 @@ public class ValidateStorageCredential { @JsonProperty("url") private String url; - public ValidateStorageCredential setAwsIamRole(AwsIamRole awsIamRole) { + public ValidateStorageCredential setAwsIamRole(AwsIamRoleRequest awsIamRole) { this.awsIamRole = awsIamRole; return this; } - public AwsIamRole getAwsIamRole() { + public AwsIamRoleRequest getAwsIamRole() { return awsIamRole; } public ValidateStorageCredential setAzureManagedIdentity( - AzureManagedIdentity azureManagedIdentity) { + AzureManagedIdentityRequest azureManagedIdentity) { this.azureManagedIdentity = azureManagedIdentity; return this; } - public AzureManagedIdentity getAzureManagedIdentity() { + public AzureManagedIdentityRequest getAzureManagedIdentity() { return azureManagedIdentity; } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidationResultOperation.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidationResultOperation.java index 7337f8c88..d0a625941 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidationResultOperation.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ValidationResultOperation.java @@ -9,6 +9,7 @@ public enum ValidationResultOperation { DELETE, LIST, + PATH_EXISTS, READ, WRITE, } diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumeInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumeInfo.java index ff2d58683..d28cf4e10 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumeInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/VolumeInfo.java @@ -13,6 +13,13 @@ public class VolumeInfo { @JsonProperty("access_point") private String accessPoint; + /** + * Indicates whether the principal is limited to retrieving metadata for the associated object + * through the BROWSE privilege when include_browse is enabled in the request. + */ + @JsonProperty("browse_only") + private Boolean browseOnly; + /** The name of the catalog where the schema and the volume are */ @JsonProperty("catalog_name") private String catalogName; @@ -82,6 +89,15 @@ public String getAccessPoint() { return accessPoint; } + public VolumeInfo setBrowseOnly(Boolean browseOnly) { + this.browseOnly = browseOnly; + return this; + } + + public Boolean getBrowseOnly() { + return browseOnly; + } + public VolumeInfo setCatalogName(String catalogName) { this.catalogName = catalogName; return this; @@ -223,6 +239,7 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; VolumeInfo that = (VolumeInfo) o; return Objects.equals(accessPoint, that.accessPoint) + && Objects.equals(browseOnly, that.browseOnly) && Objects.equals(catalogName, that.catalogName) && Objects.equals(comment, that.comment) && Objects.equals(createdAt, that.createdAt) @@ -244,6 +261,7 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash( accessPoint, + browseOnly, catalogName, comment, createdAt, @@ -265,6 +283,7 @@ public int hashCode() { public String toString() { return new ToStringer(VolumeInfo.class) .add("accessPoint", accessPoint) + .add("browseOnly", browseOnly) .add("catalogName", catalogName) .add("comment", comment) 
.add("createdAt", createdAt) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AwsAttributes.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AwsAttributes.java index 72263ebee..505b70166 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AwsAttributes.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/AwsAttributes.java @@ -38,7 +38,10 @@ public class AwsAttributes { @JsonProperty("ebs_volume_count") private Long ebsVolumeCount; - /** */ + /** + * If using gp3 volumes, what IOPS to use for the disk. If this is not set, the maximum + * performance of a gp2 volume with the same volume size will be used. + */ @JsonProperty("ebs_volume_iops") private Long ebsVolumeIops; @@ -50,7 +53,10 @@ public class AwsAttributes { @JsonProperty("ebs_volume_size") private Long ebsVolumeSize; - /** */ + /** + * If using gp3 volumes, what throughput to use for the disk. If this is not set, the maximum + * performance of a gp2 volume with the same volume size will be used. + */ @JsonProperty("ebs_volume_throughput") private Long ebsVolumeThroughput; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CloneCluster.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CloneCluster.java new file mode 100755 index 000000000..8846cf8aa --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CloneCluster.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class CloneCluster { + /** The cluster that is being cloned. 
*/ + @JsonProperty("source_cluster_id") + private String sourceClusterId; + + public CloneCluster setSourceClusterId(String sourceClusterId) { + this.sourceClusterId = sourceClusterId; + return this; + } + + public String getSourceClusterId() { + return sourceClusterId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CloneCluster that = (CloneCluster) o; + return Objects.equals(sourceClusterId, that.sourceClusterId); + } + + @Override + public int hashCode() { + return Objects.hash(sourceClusterId); + } + + @Override + public String toString() { + return new ToStringer(CloneCluster.class).add("sourceClusterId", sourceClusterId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoliciesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoliciesAPI.java index ff89273c2..5d97be737 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoliciesAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterPoliciesAPI.java @@ -3,6 +3,7 @@ import com.databricks.sdk.core.ApiClient; import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -133,7 +134,8 @@ public ClusterPolicyPermissions getPermissions(GetClusterPolicyPermissionsReques *

Returns a list of policies accessible by the requesting user. */ public Iterable list(ListClusterPoliciesRequest request) { - return impl.list(request).getPolicies(); + return new Paginator<>( + request, impl::list, ListPoliciesResponse::getPolicies, response -> null); } public ClusterPolicyPermissions setPermissions(String clusterPolicyId) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSpec.java index d64e22ab1..ef834b220 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSpec.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSpec.java @@ -45,6 +45,13 @@ public class ClusterSpec { @JsonProperty("azure_attributes") private AzureAttributes azureAttributes; + /** + * When specified, this clones libraries from a source cluster during the creation of a new + * cluster. + */ + @JsonProperty("clone_from") + private CloneCluster cloneFrom; + /** * The configuration for delivering spark logs to a long-term storage destination. Two kinds of * destinations (dbfs and s3) are supported. 
Only one destination can be specified for one @@ -274,6 +281,15 @@ public AzureAttributes getAzureAttributes() { return azureAttributes; } + public ClusterSpec setCloneFrom(CloneCluster cloneFrom) { + this.cloneFrom = cloneFrom; + return this; + } + + public CloneCluster getCloneFrom() { + return cloneFrom; + } + public ClusterSpec setClusterLogConf(ClusterLogConf clusterLogConf) { this.clusterLogConf = clusterLogConf; return this; @@ -491,6 +507,7 @@ public boolean equals(Object o) { && Objects.equals(autoterminationMinutes, that.autoterminationMinutes) && Objects.equals(awsAttributes, that.awsAttributes) && Objects.equals(azureAttributes, that.azureAttributes) + && Objects.equals(cloneFrom, that.cloneFrom) && Objects.equals(clusterLogConf, that.clusterLogConf) && Objects.equals(clusterName, that.clusterName) && Objects.equals(clusterSource, that.clusterSource) @@ -524,6 +541,7 @@ public int hashCode() { autoterminationMinutes, awsAttributes, azureAttributes, + cloneFrom, clusterLogConf, clusterName, clusterSource, @@ -557,6 +575,7 @@ public String toString() { .add("autoterminationMinutes", autoterminationMinutes) .add("awsAttributes", awsAttributes) .add("azureAttributes", azureAttributes) + .add("cloneFrom", cloneFrom) .add("clusterLogConf", clusterLogConf) .add("clusterName", clusterName) .add("clusterSource", clusterSource) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterStatusRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterStatus.java similarity index 78% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterStatusRequest.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterStatus.java index 53fa286a2..7e4c40e41 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterStatusRequest.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterStatus.java @@ -9,12 +9,12 @@ /** Get status */ @Generated -public class ClusterStatusRequest { +public class ClusterStatus { /** Unique identifier of the cluster whose status should be retrieved. */ @QueryParam("cluster_id") private String clusterId; - public ClusterStatusRequest setClusterId(String clusterId) { + public ClusterStatus setClusterId(String clusterId) { this.clusterId = clusterId; return this; } @@ -27,7 +27,7 @@ public String getClusterId() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - ClusterStatusRequest that = (ClusterStatusRequest) o; + ClusterStatus that = (ClusterStatus) o; return Objects.equals(clusterId, that.clusterId); } @@ -38,6 +38,6 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(ClusterStatusRequest.class).add("clusterId", clusterId).toString(); + return new ToStringer(ClusterStatus.class).add("clusterId", clusterId).toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterStatusResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterStatusResponse.java new file mode 100755 index 000000000..6f62c8cb5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterStatusResponse.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class ClusterStatusResponse { + /** Unique identifier for the cluster. */ + @JsonProperty("cluster_id") + private String clusterId; + + /** Status of all libraries on the cluster. 
*/ + @JsonProperty("library_statuses") + private Collection libraryStatuses; + + public ClusterStatusResponse setClusterId(String clusterId) { + this.clusterId = clusterId; + return this; + } + + public String getClusterId() { + return clusterId; + } + + public ClusterStatusResponse setLibraryStatuses(Collection libraryStatuses) { + this.libraryStatuses = libraryStatuses; + return this; + } + + public Collection getLibraryStatuses() { + return libraryStatuses; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ClusterStatusResponse that = (ClusterStatusResponse) o; + return Objects.equals(clusterId, that.clusterId) + && Objects.equals(libraryStatuses, that.libraryStatuses); + } + + @Override + public int hashCode() { + return Objects.hash(clusterId, libraryStatuses); + } + + @Override + public String toString() { + return new ToStringer(ClusterStatusResponse.class) + .add("clusterId", clusterId) + .add("libraryStatuses", libraryStatuses) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersAPI.java index 6bf0d8e66..f4681eb47 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersAPI.java @@ -232,12 +232,7 @@ public Iterable events(String clusterId) { */ public Iterable events(GetEvents request) { return new Paginator<>( - request, - impl::events, - GetEventsResponse::getEvents, - response -> { - return response.getNextPage(); - }); + request, impl::events, GetEventsResponse::getEvents, response -> response.getNextPage()); } public ClusterDetails get(String clusterId) { @@ -294,7 +289,8 @@ public ClusterPermissions getPermissions(GetClusterPermissionsRequest request) { * the 30 most recently terminated 
job clusters. */ public Iterable list(ListClustersRequest request) { - return impl.list(request).getClusters(); + return new Paginator<>( + request, impl::list, ListClustersResponse::getClusters, response -> null); } /** diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ComputeSpecKind.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ComputeSpecKind.java deleted file mode 100755 index 28521670c..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ComputeSpecKind.java +++ /dev/null @@ -1,11 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.compute; - -import com.databricks.sdk.support.Generated; - -/** The kind of compute described by this compute specification. */ -@Generated -public enum ComputeSpecKind { - SERVERLESS_PREVIEW, -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateCluster.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateCluster.java index dc294c91b..01824b261 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateCluster.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateCluster.java @@ -45,6 +45,13 @@ public class CreateCluster { @JsonProperty("azure_attributes") private AzureAttributes azureAttributes; + /** + * When specified, this clones libraries from a source cluster during the creation of a new + * cluster. + */ + @JsonProperty("clone_from") + private CloneCluster cloneFrom; + /** * The configuration for delivering spark logs to a long-term storage destination. Two kinds of * destinations (dbfs and s3) are supported. 
Only one destination can be specified for one @@ -274,6 +281,15 @@ public AzureAttributes getAzureAttributes() { return azureAttributes; } + public CreateCluster setCloneFrom(CloneCluster cloneFrom) { + this.cloneFrom = cloneFrom; + return this; + } + + public CloneCluster getCloneFrom() { + return cloneFrom; + } + public CreateCluster setClusterLogConf(ClusterLogConf clusterLogConf) { this.clusterLogConf = clusterLogConf; return this; @@ -491,6 +507,7 @@ public boolean equals(Object o) { && Objects.equals(autoterminationMinutes, that.autoterminationMinutes) && Objects.equals(awsAttributes, that.awsAttributes) && Objects.equals(azureAttributes, that.azureAttributes) + && Objects.equals(cloneFrom, that.cloneFrom) && Objects.equals(clusterLogConf, that.clusterLogConf) && Objects.equals(clusterName, that.clusterName) && Objects.equals(clusterSource, that.clusterSource) @@ -524,6 +541,7 @@ public int hashCode() { autoterminationMinutes, awsAttributes, azureAttributes, + cloneFrom, clusterLogConf, clusterName, clusterSource, @@ -557,6 +575,7 @@ public String toString() { .add("autoterminationMinutes", autoterminationMinutes) .add("awsAttributes", awsAttributes) .add("azureAttributes", azureAttributes) + .add("cloneFrom", cloneFrom) .add("clusterLogConf", clusterLogConf) .add("clusterName", clusterName) .add("clusterSource", clusterSource) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java index 1113ae904..3da5b88c7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java @@ -45,6 +45,13 @@ public class EditCluster { @JsonProperty("azure_attributes") private AzureAttributes azureAttributes; + /** + * When specified, this clones libraries from a source cluster during the creation of a new + * cluster. 
+ */ + @JsonProperty("clone_from") + private CloneCluster cloneFrom; + /** ID of the cluser */ @JsonProperty("cluster_id") private String clusterId; @@ -278,6 +285,15 @@ public AzureAttributes getAzureAttributes() { return azureAttributes; } + public EditCluster setCloneFrom(CloneCluster cloneFrom) { + this.cloneFrom = cloneFrom; + return this; + } + + public CloneCluster getCloneFrom() { + return cloneFrom; + } + public EditCluster setClusterId(String clusterId) { this.clusterId = clusterId; return this; @@ -504,6 +520,7 @@ public boolean equals(Object o) { && Objects.equals(autoterminationMinutes, that.autoterminationMinutes) && Objects.equals(awsAttributes, that.awsAttributes) && Objects.equals(azureAttributes, that.azureAttributes) + && Objects.equals(cloneFrom, that.cloneFrom) && Objects.equals(clusterId, that.clusterId) && Objects.equals(clusterLogConf, that.clusterLogConf) && Objects.equals(clusterName, that.clusterName) @@ -538,6 +555,7 @@ public int hashCode() { autoterminationMinutes, awsAttributes, azureAttributes, + cloneFrom, clusterId, clusterLogConf, clusterName, @@ -572,6 +590,7 @@ public String toString() { .add("autoterminationMinutes", autoterminationMinutes) .add("awsAttributes", awsAttributes) .add("azureAttributes", azureAttributes) + .add("cloneFrom", cloneFrom) .add("clusterId", clusterId) .add("clusterLogConf", clusterLogConf) .add("clusterName", clusterName) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java new file mode 100755 index 000000000..e46010d44 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Environment.java @@ -0,0 +1,75 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.compute; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** + * The a environment entity used to preserve serverless environment side panel and jobs' environment + * for non-notebook task. In this minimal environment spec, only pip dependencies are supported. + * Next ID: 5 + */ +@Generated +public class Environment { + /** + * Client version used by the environment The client is the user-facing environment of the + * runtime. Each client comes with a specific set of pre-installed libraries. The version is a + * string, consisting of the major client version. + */ + @JsonProperty("client") + private String client; + + /** + * List of pip dependencies, as supported by the version of pip in this environment. Each + * dependency is a pip requirement file line + * https://pip.pypa.io/en/stable/reference/requirements-file-format/ Allowed dependency could be + * , , (WSFS or Volumes in + * Databricks), E.g. 
dependencies: ["foo==0.0.1", "-r + * /Workspace/test/requirements.txt"] + */ + @JsonProperty("dependencies") + private Collection dependencies; + + public Environment setClient(String client) { + this.client = client; + return this; + } + + public String getClient() { + return client; + } + + public Environment setDependencies(Collection dependencies) { + this.dependencies = dependencies; + return this; + } + + public Collection getDependencies() { + return dependencies; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Environment that = (Environment) o; + return Objects.equals(client, that.client) && Objects.equals(dependencies, that.dependencies); + } + + @Override + public int hashCode() { + return Objects.hash(client, dependencies); + } + + @Override + public String toString() { + return new ToStringer(Environment.class) + .add("client", client) + .add("dependencies", dependencies) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptsAPI.java index 54d1829f3..28f450f8e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GlobalInitScriptsAPI.java @@ -3,6 +3,7 @@ import com.databricks.sdk.core.ApiClient; import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -78,7 +79,8 @@ public GlobalInitScriptDetailsWithContent get(GetGlobalInitScriptRequest request * a global init script](:method:globalinitscripts/get) operation. 
*/ public Iterable list() { - return impl.list().getScripts(); + return new Paginator<>( + null, (Void v) -> impl.list(), ListGlobalInitScriptsResponse::getScripts, response -> null); } public void update(String scriptId, String name, String script) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolGcpAttributes.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolGcpAttributes.java index 3cece3f78..b989f56de 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolGcpAttributes.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolGcpAttributes.java @@ -33,6 +33,13 @@ public class InstancePoolGcpAttributes { * the Databricks workspace. For example, "us-west1-a" is not a valid zone id if the Databricks * workspace resides in the "us-east1" region. This is an optional field at instance pool * creation, and if not specified, a default zone will be used. + * + *

This field can be one of the following: - "HA" => High availability, spread nodes across + * availability zones for a Databricks deployment region - A GCP availability zone => Pick One of + * the available zones for (machine type + region) from + * https://cloud.google.com/compute/docs/regions-zones (e.g. "us-west1-a"). + * + *

If empty, Databricks picks an availability zone to schedule the cluster on. */ @JsonProperty("zone_id") private String zoneId; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolsAPI.java index fa2da924f..11431a3c3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolsAPI.java @@ -3,6 +3,7 @@ import com.databricks.sdk.core.ApiClient; import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -133,7 +134,8 @@ public InstancePoolPermissions getPermissions(GetInstancePoolPermissionsRequest *

Gets a list of instance pools with their statistics. */ public Iterable list() { - return impl.list().getInstancePools(); + return new Paginator<>( + null, (Void v) -> impl.list(), ListInstancePools::getInstancePools, response -> null); } public InstancePoolPermissions setPermissions(String instancePoolId) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstanceProfilesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstanceProfilesAPI.java index edc9d1921..2408fad89 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstanceProfilesAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstanceProfilesAPI.java @@ -3,6 +3,7 @@ import com.databricks.sdk.core.ApiClient; import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -77,7 +78,11 @@ public void edit(InstanceProfile request) { *

This API is available to all users. */ public Iterable list() { - return impl.list().getInstanceProfiles(); + return new Paginator<>( + null, + (Void v) -> impl.list(), + ListInstanceProfilesResponse::getInstanceProfiles, + response -> null); } public void remove(String instanceProfileArn) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesAPI.java index a1cf72255..2de225d87 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesAPI.java @@ -3,6 +3,7 @@ import com.databricks.sdk.core.ApiClient; import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; import java.util.Collection; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -13,17 +14,13 @@ * *

To make third-party or custom code available to notebooks and jobs running on your clusters, * you can install a library. Libraries can be written in Python, Java, Scala, and R. You can upload - * Java, Scala, and Python libraries and point to external packages in PyPI, Maven, and CRAN + * Python, Java, Scala and R libraries and point to external packages in PyPI, Maven, and CRAN * repositories. * *

Cluster libraries can be used by all notebooks running on a cluster. You can install a cluster * library directly from a public repository such as PyPI or Maven, using a previously installed * workspace library, or using an init script. * - *

When you install a library on a cluster, a notebook already attached to that cluster will not - * immediately see the new library. You must first detach and then reattach the notebook to the - * cluster. - * *

When you uninstall a library from a cluster, the library is removed only when you restart the * cluster. Until you restart the cluster, the status of the uninstalled library appears as * Uninstall pending restart. @@ -47,37 +44,29 @@ public LibrariesAPI(LibrariesService mock) { /** * Get all statuses. * - *

Get the status of all libraries on all clusters. A status will be available for all - * libraries installed on this cluster via the API or the libraries UI as well as libraries set to - * be installed on all clusters via the libraries UI. + *

Get the status of all libraries on all clusters. A status is returned for all libraries + * installed on this cluster via the API or the libraries UI. */ public ListAllClusterLibraryStatusesResponse allClusterStatuses() { return impl.allClusterStatuses(); } public Iterable clusterStatus(String clusterId) { - return clusterStatus(new ClusterStatusRequest().setClusterId(clusterId)); + return clusterStatus(new ClusterStatus().setClusterId(clusterId)); } /** * Get status. * - *

Get the status of libraries on a cluster. A status will be available for all libraries - * installed on this cluster via the API or the libraries UI as well as libraries set to be - * installed on all clusters via the libraries UI. The order of returned libraries will be as - * follows. - * - *

1. Libraries set to be installed on this cluster will be returned first. Within this group, - * the final order will be order in which the libraries were added to the cluster. - * - *

2. Libraries set to be installed on all clusters are returned next. Within this group there - * is no order guarantee. - * - *

3. Libraries that were previously requested on this cluster or on all clusters, but now - * marked for removal. Within this group there is no order guarantee. + *

Get the status of libraries on a cluster. A status is returned for all libraries installed + * on this cluster via the API or the libraries UI. The order of returned libraries is as follows: + * 1. Libraries set to be installed on this cluster, in the order that the libraries were added to + * the cluster, are returned first. 2. Libraries that were previously requested to be installed on + * this cluster, but are now marked for removal, in no particular order, are returned last. + */ - public Iterable clusterStatus(ClusterStatusRequest request) { - return impl.clusterStatus(request).getLibraryStatuses(); + public Iterable clusterStatus(ClusterStatus request) { + return new Paginator<>( + request, impl::clusterStatus, ClusterStatusResponse::getLibraryStatuses, response -> null); } public void install(String clusterId, Collection libraries) { @@ -87,12 +76,8 @@ public void install(String clusterId, Collection libraries) { /** * Add a library. * - *

Add libraries to be installed on a cluster. The installation is asynchronous; it happens in - * the background after the completion of this request. - * - *

**Note**: The actual set of libraries to be installed on a cluster is the union of the - * libraries specified via this method and the libraries set to be installed on all clusters via - * the libraries UI. + *

Add libraries to install on a cluster. The installation is asynchronous; it happens in the + * background after the completion of this request. */ public void install(InstallLibraries request) { impl.install(request); @@ -105,9 +90,9 @@ public void uninstall(String clusterId, Collection libraries) { /** * Uninstall libraries. * - *

Set libraries to be uninstalled on a cluster. The libraries won't be uninstalled until the - * cluster is restarted. Uninstalling libraries that are not installed on the cluster will have no - * impact but is not an error. + *

Set libraries to uninstall from a cluster. The libraries won't be uninstalled until the + * cluster is restarted. A request to uninstall a library that is not currently installed is + * ignored. */ public void uninstall(UninstallLibraries request) { impl.uninstall(request); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesImpl.java index 1485f7688..8c8124d6a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesImpl.java @@ -24,11 +24,11 @@ public ListAllClusterLibraryStatusesResponse allClusterStatuses() { } @Override - public ClusterLibraryStatuses clusterStatus(ClusterStatusRequest request) { + public ClusterStatusResponse clusterStatus(ClusterStatus request) { String path = "/api/2.0/libraries/cluster-status"; Map headers = new HashMap<>(); headers.put("Accept", "application/json"); - return apiClient.GET(path, request, ClusterLibraryStatuses.class, headers); + return apiClient.GET(path, request, ClusterStatusResponse.class, headers); } @Override diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesService.java index 99119dc20..2e9773ff5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesService.java @@ -9,17 +9,13 @@ * *

To make third-party or custom code available to notebooks and jobs running on your clusters, * you can install a library. Libraries can be written in Python, Java, Scala, and R. You can upload - * Java, Scala, and Python libraries and point to external packages in PyPI, Maven, and CRAN + * Python, Java, Scala and R libraries and point to external packages in PyPI, Maven, and CRAN * repositories. * *

Cluster libraries can be used by all notebooks running on a cluster. You can install a cluster * library directly from a public repository such as PyPI or Maven, using a previously installed * workspace library, or using an init script. * - *

When you install a library on a cluster, a notebook already attached to that cluster will not - * immediately see the new library. You must first detach and then reattach the notebook to the - * cluster. - * *

When you uninstall a library from a cluster, the library is removed only when you restart the * cluster. Until you restart the cluster, the status of the uninstalled library appears as * Uninstall pending restart. @@ -33,49 +29,36 @@ public interface LibrariesService { /** * Get all statuses. * - *

Get the status of all libraries on all clusters. A status will be available for all - * libraries installed on this cluster via the API or the libraries UI as well as libraries set to - * be installed on all clusters via the libraries UI. + *

Get the status of all libraries on all clusters. A status is returned for all libraries + * installed on this cluster via the API or the libraries UI. */ ListAllClusterLibraryStatusesResponse allClusterStatuses(); /** * Get status. * - *

Get the status of libraries on a cluster. A status will be available for all libraries - * installed on this cluster via the API or the libraries UI as well as libraries set to be - * installed on all clusters via the libraries UI. The order of returned libraries will be as - * follows. - * - *

1. Libraries set to be installed on this cluster will be returned first. Within this group, - * the final order will be order in which the libraries were added to the cluster. - * - *

2. Libraries set to be installed on all clusters are returned next. Within this group there - * is no order guarantee. - * - *

3. Libraries that were previously requested on this cluster or on all clusters, but now - * marked for removal. Within this group there is no order guarantee. + *

Get the status of libraries on a cluster. A status is returned for all libraries installed + * on this cluster via the API or the libraries UI. The order of returned libraries is as follows: + * 1. Libraries set to be installed on this cluster, in the order that the libraries were added to + * the cluster, are returned first. 2. Libraries that were previously requested to be installed on + * this cluster, but are now marked for removal, in no particular order, are returned last. + */ - ClusterLibraryStatuses clusterStatus(ClusterStatusRequest clusterStatusRequest); + ClusterStatusResponse clusterStatus(ClusterStatus clusterStatus); /** * Add a library. * - *

Add libraries to be installed on a cluster. The installation is asynchronous; it happens in - * the background after the completion of this request. - * - *

**Note**: The actual set of libraries to be installed on a cluster is the union of the - * libraries specified via this method and the libraries set to be installed on all clusters via - * the libraries UI. + *

Add libraries to install on a cluster. The installation is asynchronous; it happens in the + * background after the completion of this request. */ void install(InstallLibraries installLibraries); /** * Uninstall libraries. * - *

Set libraries to be uninstalled on a cluster. The libraries won't be uninstalled until the - * cluster is restarted. Uninstalling libraries that are not installed on the cluster will have no - * impact but is not an error. + *

Set libraries to uninstall from a cluster. The libraries won't be uninstalled until the + * cluster is restarted. A request to uninstall a library that is not currently installed is + * ignored. */ void uninstall(UninstallLibraries uninstallLibraries); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Library.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Library.java index 85f8a1724..7f9f10961 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Library.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/Library.java @@ -14,18 +14,20 @@ public class Library { private RCranLibrary cran; /** - * URI of the egg to be installed. Currently only DBFS and S3 URIs are supported. For example: `{ - * "egg": "dbfs:/my/egg" }` or `{ "egg": "s3://my-bucket/egg" }`. If S3 is used, please make sure - * the cluster has read access on the library. You may need to launch the cluster with an IAM role - * to access the S3 URI. + * URI of the egg library to install. Supported URIs include Workspace paths, Unity Catalog + * Volumes paths, and S3 URIs. For example: `{ "egg": "/Workspace/path/to/library.egg" }`, `{ + * "egg" : "/Volumes/path/to/library.egg" }` or `{ "egg": "s3://my-bucket/library.egg" }`. If S3 + * is used, please make sure the cluster has read access on the library. You may need to launch + * the cluster with an IAM role to access the S3 URI. */ @JsonProperty("egg") private String egg; /** - * URI of the jar to be installed. Currently only DBFS and S3 URIs are supported. For example: `{ - * "jar": "dbfs:/mnt/databricks/library.jar" }` or `{ "jar": "s3://my-bucket/library.jar" }`. If - * S3 is used, please make sure the cluster has read access on the library. You may need to launch + * URI of the JAR library to install. Supported URIs include Workspace paths, Unity Catalog + * Volumes paths, and S3 URIs. 
For example: `{ "jar": "/Workspace/path/to/library.jar" }`, `{ + * "jar" : "/Volumes/path/to/library.jar" }` or `{ "jar": "s3://my-bucket/library.jar" }`. If S3 + * is used, please make sure the cluster has read access on the library. You may need to launch * the cluster with an IAM role to access the S3 URI. */ @JsonProperty("jar") @@ -43,9 +45,19 @@ public class Library { private PythonPyPiLibrary pypi; /** - * URI of the wheel to be installed. For example: `{ "whl": "dbfs:/my/whl" }` or `{ "whl": - * "s3://my-bucket/whl" }`. If S3 is used, please make sure the cluster has read access on the - * library. You may need to launch the cluster with an IAM role to access the S3 URI. + * URI of the requirements.txt file to install. Only Workspace paths and Unity Catalog Volumes + * paths are supported. For example: `{ "requirements": "/Workspace/path/to/requirements.txt" }` + * or `{ "requirements" : "/Volumes/path/to/requirements.txt" }` + */ + @JsonProperty("requirements") + private String requirements; + + /** + * URI of the wheel library to install. Supported URIs include Workspace paths, Unity Catalog + * Volumes paths, and S3 URIs. For example: `{ "whl": "/Workspace/path/to/library.whl" }`, `{ + * "whl" : "/Volumes/path/to/library.whl" }` or `{ "whl": "s3://my-bucket/library.whl" }`. If S3 + * is used, please make sure the cluster has read access on the library. You may need to launch + * the cluster with an IAM role to access the S3 URI. 
*/ @JsonProperty("whl") private String whl; @@ -95,6 +107,15 @@ public PythonPyPiLibrary getPypi() { return pypi; } + public Library setRequirements(String requirements) { + this.requirements = requirements; + return this; + } + + public String getRequirements() { + return requirements; + } + public Library setWhl(String whl) { this.whl = whl; return this; @@ -114,12 +135,13 @@ public boolean equals(Object o) { && Objects.equals(jar, that.jar) && Objects.equals(maven, that.maven) && Objects.equals(pypi, that.pypi) + && Objects.equals(requirements, that.requirements) && Objects.equals(whl, that.whl); } @Override public int hashCode() { - return Objects.hash(cran, egg, jar, maven, pypi, whl); + return Objects.hash(cran, egg, jar, maven, pypi, requirements, whl); } @Override @@ -130,6 +152,7 @@ public String toString() { .add("jar", jar) .add("maven", maven) .add("pypi", pypi) + .add("requirements", requirements) .add("whl", whl) .toString(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibraryFullStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibraryFullStatus.java index bdab295df..8a4a0b6c0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibraryFullStatus.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibraryFullStatus.java @@ -8,6 +8,7 @@ import java.util.Collection; import java.util.Objects; +/** The status of the library on a specific cluster. */ @Generated public class LibraryFullStatus { /** Whether the library was set to be installed on all clusters via the libraries UI. */ @@ -24,7 +25,7 @@ public class LibraryFullStatus { /** Status of installing the library on the cluster. 
*/ @JsonProperty("status") - private LibraryFullStatusStatus status; + private LibraryInstallStatus status; public LibraryFullStatus setIsLibraryForAllClusters(Boolean isLibraryForAllClusters) { this.isLibraryForAllClusters = isLibraryForAllClusters; @@ -53,12 +54,12 @@ public Collection getMessages() { return messages; } - public LibraryFullStatus setStatus(LibraryFullStatusStatus status) { + public LibraryFullStatus setStatus(LibraryInstallStatus status) { this.status = status; return this; } - public LibraryFullStatusStatus getStatus() { + public LibraryInstallStatus getStatus() { return status; } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibraryFullStatusStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibraryInstallStatus.java similarity index 73% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibraryFullStatusStatus.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibraryInstallStatus.java index 484f2fc83..51c743797 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibraryFullStatusStatus.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibraryInstallStatus.java @@ -4,14 +4,15 @@ import com.databricks.sdk.support.Generated; -/** Status of installing the library on the cluster. */ +/** The status of a library on a specific cluster. 
*/ @Generated -public enum LibraryFullStatusStatus { +public enum LibraryInstallStatus { FAILED, INSTALLED, INSTALLING, PENDING, RESOLVING, + RESTORED, SKIPPED, UNINSTALL_ON_RESTART, } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateDashboardRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateDashboardRequest.java new file mode 100755 index 000000000..5977206de --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateDashboardRequest.java @@ -0,0 +1,92 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class CreateDashboardRequest { + /** The display name of the dashboard. */ + @JsonProperty("display_name") + private String displayName; + + /** + * The workspace path of the folder containing the dashboard. Includes leading slash and no + * trailing slash. + */ + @JsonProperty("parent_path") + private String parentPath; + + /** The contents of the dashboard in serialized string form. */ + @JsonProperty("serialized_dashboard") + private String serializedDashboard; + + /** The warehouse ID used to run the dashboard. 
*/ + @JsonProperty("warehouse_id") + private String warehouseId; + + public CreateDashboardRequest setDisplayName(String displayName) { + this.displayName = displayName; + return this; + } + + public String getDisplayName() { + return displayName; + } + + public CreateDashboardRequest setParentPath(String parentPath) { + this.parentPath = parentPath; + return this; + } + + public String getParentPath() { + return parentPath; + } + + public CreateDashboardRequest setSerializedDashboard(String serializedDashboard) { + this.serializedDashboard = serializedDashboard; + return this; + } + + public String getSerializedDashboard() { + return serializedDashboard; + } + + public CreateDashboardRequest setWarehouseId(String warehouseId) { + this.warehouseId = warehouseId; + return this; + } + + public String getWarehouseId() { + return warehouseId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateDashboardRequest that = (CreateDashboardRequest) o; + return Objects.equals(displayName, that.displayName) + && Objects.equals(parentPath, that.parentPath) + && Objects.equals(serializedDashboard, that.serializedDashboard) + && Objects.equals(warehouseId, that.warehouseId); + } + + @Override + public int hashCode() { + return Objects.hash(displayName, parentPath, serializedDashboard, warehouseId); + } + + @Override + public String toString() { + return new ToStringer(CreateDashboardRequest.class) + .add("displayName", displayName) + .add("parentPath", parentPath) + .add("serializedDashboard", serializedDashboard) + .add("warehouseId", warehouseId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Dashboard.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Dashboard.java new file mode 100755 index 000000000..8353ab585 --- /dev/null +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Dashboard.java @@ -0,0 +1,195 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class Dashboard { + /** The timestamp of when the dashboard was created. */ + @JsonProperty("create_time") + private String createTime; + + /** UUID identifying the dashboard. */ + @JsonProperty("dashboard_id") + private String dashboardId; + + /** The display name of the dashboard. */ + @JsonProperty("display_name") + private String displayName; + + /** + * The etag for the dashboard. Can be optionally provided on updates to ensure that the dashboard + * has not been modified since the last read. + */ + @JsonProperty("etag") + private String etag; + + /** The state of the dashboard resource. Used for tracking trashed status. */ + @JsonProperty("lifecycle_state") + private LifecycleState lifecycleState; + + /** + * The workspace path of the folder containing the dashboard. Includes leading slash and no + * trailing slash. + */ + @JsonProperty("parent_path") + private String parentPath; + + /** The workspace path of the dashboard asset, including the file name. */ + @JsonProperty("path") + private String path; + + /** The contents of the dashboard in serialized string form. */ + @JsonProperty("serialized_dashboard") + private String serializedDashboard; + + /** The timestamp of when the dashboard was last updated by the user. */ + @JsonProperty("update_time") + private String updateTime; + + /** The warehouse ID used to run the dashboard. 
*/ + @JsonProperty("warehouse_id") + private String warehouseId; + + public Dashboard setCreateTime(String createTime) { + this.createTime = createTime; + return this; + } + + public String getCreateTime() { + return createTime; + } + + public Dashboard setDashboardId(String dashboardId) { + this.dashboardId = dashboardId; + return this; + } + + public String getDashboardId() { + return dashboardId; + } + + public Dashboard setDisplayName(String displayName) { + this.displayName = displayName; + return this; + } + + public String getDisplayName() { + return displayName; + } + + public Dashboard setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + public Dashboard setLifecycleState(LifecycleState lifecycleState) { + this.lifecycleState = lifecycleState; + return this; + } + + public LifecycleState getLifecycleState() { + return lifecycleState; + } + + public Dashboard setParentPath(String parentPath) { + this.parentPath = parentPath; + return this; + } + + public String getParentPath() { + return parentPath; + } + + public Dashboard setPath(String path) { + this.path = path; + return this; + } + + public String getPath() { + return path; + } + + public Dashboard setSerializedDashboard(String serializedDashboard) { + this.serializedDashboard = serializedDashboard; + return this; + } + + public String getSerializedDashboard() { + return serializedDashboard; + } + + public Dashboard setUpdateTime(String updateTime) { + this.updateTime = updateTime; + return this; + } + + public String getUpdateTime() { + return updateTime; + } + + public Dashboard setWarehouseId(String warehouseId) { + this.warehouseId = warehouseId; + return this; + } + + public String getWarehouseId() { + return warehouseId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Dashboard that = (Dashboard) o; + return Objects.equals(createTime, 
that.createTime) + && Objects.equals(dashboardId, that.dashboardId) + && Objects.equals(displayName, that.displayName) + && Objects.equals(etag, that.etag) + && Objects.equals(lifecycleState, that.lifecycleState) + && Objects.equals(parentPath, that.parentPath) + && Objects.equals(path, that.path) + && Objects.equals(serializedDashboard, that.serializedDashboard) + && Objects.equals(updateTime, that.updateTime) + && Objects.equals(warehouseId, that.warehouseId); + } + + @Override + public int hashCode() { + return Objects.hash( + createTime, + dashboardId, + displayName, + etag, + lifecycleState, + parentPath, + path, + serializedDashboard, + updateTime, + warehouseId); + } + + @Override + public String toString() { + return new ToStringer(Dashboard.class) + .add("createTime", createTime) + .add("dashboardId", dashboardId) + .add("displayName", displayName) + .add("etag", etag) + .add("lifecycleState", lifecycleState) + .add("parentPath", parentPath) + .add("path", path) + .add("serializedDashboard", serializedDashboard) + .add("updateTime", updateTime) + .add("warehouseId", warehouseId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetDashboardRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetDashboardRequest.java new file mode 100755 index 000000000..bbdb5b13a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetDashboardRequest.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +/** Get dashboard */ +@Generated +public class GetDashboardRequest { + /** UUID identifying the dashboard. 
*/ + private String dashboardId; + + public GetDashboardRequest setDashboardId(String dashboardId) { + this.dashboardId = dashboardId; + return this; + } + + public String getDashboardId() { + return dashboardId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetDashboardRequest that = (GetDashboardRequest) o; + return Objects.equals(dashboardId, that.dashboardId); + } + + @Override + public int hashCode() { + return Objects.hash(dashboardId); + } + + @Override + public String toString() { + return new ToStringer(GetDashboardRequest.class).add("dashboardId", dashboardId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardRequest.java new file mode 100755 index 000000000..6f29da065 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardRequest.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +/** Get published dashboard */ +@Generated +public class GetPublishedDashboardRequest { + /** UUID identifying the dashboard to be published. 
*/ + private String dashboardId; + + public GetPublishedDashboardRequest setDashboardId(String dashboardId) { + this.dashboardId = dashboardId; + return this; + } + + public String getDashboardId() { + return dashboardId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetPublishedDashboardRequest that = (GetPublishedDashboardRequest) o; + return Objects.equals(dashboardId, that.dashboardId); + } + + @Override + public int hashCode() { + return Objects.hash(dashboardId); + } + + @Override + public String toString() { + return new ToStringer(GetPublishedDashboardRequest.class) + .add("dashboardId", dashboardId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewAPI.java index bc55fbe21..ff62385cf 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewAPI.java @@ -26,8 +26,60 @@ public LakeviewAPI(LakeviewService mock) { impl = mock; } - public void publish(String dashboardId) { - publish(new PublishRequest().setDashboardId(dashboardId)); + public Dashboard create(String displayName) { + return create(new CreateDashboardRequest().setDisplayName(displayName)); + } + + /** + * Create dashboard. + * + *

Create a draft dashboard. + */ + public Dashboard create(CreateDashboardRequest request) { + return impl.create(request); + } + + public Dashboard get(String dashboardId) { + return get(new GetDashboardRequest().setDashboardId(dashboardId)); + } + + /** + * Get dashboard. + * + *

Get a draft dashboard. + */ + public Dashboard get(GetDashboardRequest request) { + return impl.get(request); + } + + public PublishedDashboard getPublished(String dashboardId) { + return getPublished(new GetPublishedDashboardRequest().setDashboardId(dashboardId)); + } + + /** + * Get published dashboard. + * + *

Get the current published dashboard. + */ + public PublishedDashboard getPublished(GetPublishedDashboardRequest request) { + return impl.getPublished(request); + } + + public Dashboard migrate(String sourceDashboardId) { + return migrate(new MigrateDashboardRequest().setSourceDashboardId(sourceDashboardId)); + } + + /** + * Migrate dashboard. + * + *

Migrates a classic SQL dashboard to Lakeview. + */ + public Dashboard migrate(MigrateDashboardRequest request) { + return impl.migrate(request); + } + + public PublishedDashboard publish(String dashboardId) { + return publish(new PublishRequest().setDashboardId(dashboardId)); } /** @@ -35,8 +87,47 @@ public void publish(String dashboardId) { * *

Publish the current draft dashboard. */ - public void publish(PublishRequest request) { - impl.publish(request); + public PublishedDashboard publish(PublishRequest request) { + return impl.publish(request); + } + + public void trash(String dashboardId) { + trash(new TrashDashboardRequest().setDashboardId(dashboardId)); + } + + /** + * Trash dashboard. + * + *

Trash a dashboard. + */ + public void trash(TrashDashboardRequest request) { + impl.trash(request); + } + + public void unpublish(String dashboardId) { + unpublish(new UnpublishDashboardRequest().setDashboardId(dashboardId)); + } + + /** + * Unpublish dashboard. + * + *

Unpublish the dashboard. + */ + public void unpublish(UnpublishDashboardRequest request) { + impl.unpublish(request); + } + + public Dashboard update(String dashboardId) { + return update(new UpdateDashboardRequest().setDashboardId(dashboardId)); + } + + /** + * Update dashboard. + * + *

Update a draft dashboard. + */ + public Dashboard update(UpdateDashboardRequest request) { + return impl.update(request); } public LakeviewService impl() { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewImpl.java index 8386295ce..10a926490 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewImpl.java @@ -16,12 +16,73 @@ public LakeviewImpl(ApiClient apiClient) { } @Override - public void publish(PublishRequest request) { + public Dashboard create(CreateDashboardRequest request) { + String path = "/api/2.0/lakeview/dashboards"; + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + headers.put("Content-Type", "application/json"); + return apiClient.POST(path, request, Dashboard.class, headers); + } + + @Override + public Dashboard get(GetDashboardRequest request) { + String path = String.format("/api/2.0/lakeview/dashboards/%s", request.getDashboardId()); + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + return apiClient.GET(path, request, Dashboard.class, headers); + } + + @Override + public PublishedDashboard getPublished(GetPublishedDashboardRequest request) { + String path = + String.format("/api/2.0/lakeview/dashboards/%s/published", request.getDashboardId()); + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + return apiClient.GET(path, request, PublishedDashboard.class, headers); + } + + @Override + public Dashboard migrate(MigrateDashboardRequest request) { + String path = "/api/2.0/lakeview/dashboards/migrate"; + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + headers.put("Content-Type", "application/json"); + return apiClient.POST(path, request, Dashboard.class, headers); + } 
+ + @Override + public PublishedDashboard publish(PublishRequest request) { String path = String.format("/api/2.0/lakeview/dashboards/%s/published", request.getDashboardId()); Map headers = new HashMap<>(); headers.put("Accept", "application/json"); headers.put("Content-Type", "application/json"); - apiClient.POST(path, request, PublishResponse.class, headers); + return apiClient.POST(path, request, PublishedDashboard.class, headers); + } + + @Override + public void trash(TrashDashboardRequest request) { + String path = String.format("/api/2.0/lakeview/dashboards/%s", request.getDashboardId()); + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + apiClient.DELETE(path, request, TrashDashboardResponse.class, headers); + } + + @Override + public void unpublish(UnpublishDashboardRequest request) { + String path = + String.format("/api/2.0/lakeview/dashboards/%s/published", request.getDashboardId()); + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + apiClient.DELETE(path, request, UnpublishDashboardResponse.class, headers); + } + + @Override + public Dashboard update(UpdateDashboardRequest request) { + String path = String.format("/api/2.0/lakeview/dashboards/%s", request.getDashboardId()); + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + headers.put("Content-Type", "application/json"); + return apiClient.PATCH(path, request, Dashboard.class, headers); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewService.java index c283a65ae..d5d713404 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewService.java @@ -13,10 +13,59 @@ */ @Generated public interface LakeviewService { + /** + * Create dashboard. + * + *

Create a draft dashboard. + */ + Dashboard create(CreateDashboardRequest createDashboardRequest); + + /** + * Get dashboard. + * + *

Get a draft dashboard. + */ + Dashboard get(GetDashboardRequest getDashboardRequest); + + /** + * Get published dashboard. + * + *

Get the current published dashboard. + */ + PublishedDashboard getPublished(GetPublishedDashboardRequest getPublishedDashboardRequest); + + /** + * Migrate dashboard. + * + *

Migrates a classic SQL dashboard to Lakeview. + */ + Dashboard migrate(MigrateDashboardRequest migrateDashboardRequest); + /** * Publish dashboard. * *

Publish the current draft dashboard. */ - void publish(PublishRequest publishRequest); + PublishedDashboard publish(PublishRequest publishRequest); + + /** + * Trash dashboard. + * + *

Trash a dashboard. + */ + void trash(TrashDashboardRequest trashDashboardRequest); + + /** + * Unpublish dashboard. + * + *

Unpublish the dashboard. + */ + void unpublish(UnpublishDashboardRequest unpublishDashboardRequest); + + /** + * Update dashboard. + * + *

Update a draft dashboard. + */ + Dashboard update(UpdateDashboardRequest updateDashboardRequest); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LifecycleState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LifecycleState.java new file mode 100755 index 000000000..37abfd2a8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LifecycleState.java @@ -0,0 +1,11 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum LifecycleState { + ACTIVE, + TRASHED, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MigrateDashboardRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MigrateDashboardRequest.java new file mode 100755 index 000000000..360c202ec --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MigrateDashboardRequest.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class MigrateDashboardRequest { + /** Display name for the new Lakeview dashboard. */ + @JsonProperty("display_name") + private String displayName; + + /** The workspace path of the folder to contain the migrated Lakeview dashboard. */ + @JsonProperty("parent_path") + private String parentPath; + + /** UUID of the dashboard to be migrated. 
*/ + @JsonProperty("source_dashboard_id") + private String sourceDashboardId; + + public MigrateDashboardRequest setDisplayName(String displayName) { + this.displayName = displayName; + return this; + } + + public String getDisplayName() { + return displayName; + } + + public MigrateDashboardRequest setParentPath(String parentPath) { + this.parentPath = parentPath; + return this; + } + + public String getParentPath() { + return parentPath; + } + + public MigrateDashboardRequest setSourceDashboardId(String sourceDashboardId) { + this.sourceDashboardId = sourceDashboardId; + return this; + } + + public String getSourceDashboardId() { + return sourceDashboardId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + MigrateDashboardRequest that = (MigrateDashboardRequest) o; + return Objects.equals(displayName, that.displayName) + && Objects.equals(parentPath, that.parentPath) + && Objects.equals(sourceDashboardId, that.sourceDashboardId); + } + + @Override + public int hashCode() { + return Objects.hash(displayName, parentPath, sourceDashboardId); + } + + @Override + public String toString() { + return new ToStringer(MigrateDashboardRequest.class) + .add("displayName", displayName) + .add("parentPath", parentPath) + .add("sourceDashboardId", sourceDashboardId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PublishedDashboard.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PublishedDashboard.java new file mode 100755 index 000000000..c8133c4f2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PublishedDashboard.java @@ -0,0 +1,89 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class PublishedDashboard { + /** The display name of the published dashboard. */ + @JsonProperty("display_name") + private String displayName; + + /** Indicates whether credentials are embedded in the published dashboard. */ + @JsonProperty("embed_credentials") + private Boolean embedCredentials; + + /** The timestamp of when the published dashboard was last revised. */ + @JsonProperty("revision_create_time") + private String revisionCreateTime; + + /** The warehouse ID used to run the published dashboard. */ + @JsonProperty("warehouse_id") + private String warehouseId; + + public PublishedDashboard setDisplayName(String displayName) { + this.displayName = displayName; + return this; + } + + public String getDisplayName() { + return displayName; + } + + public PublishedDashboard setEmbedCredentials(Boolean embedCredentials) { + this.embedCredentials = embedCredentials; + return this; + } + + public Boolean getEmbedCredentials() { + return embedCredentials; + } + + public PublishedDashboard setRevisionCreateTime(String revisionCreateTime) { + this.revisionCreateTime = revisionCreateTime; + return this; + } + + public String getRevisionCreateTime() { + return revisionCreateTime; + } + + public PublishedDashboard setWarehouseId(String warehouseId) { + this.warehouseId = warehouseId; + return this; + } + + public String getWarehouseId() { + return warehouseId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PublishedDashboard that = (PublishedDashboard) o; + return Objects.equals(displayName, that.displayName) + && Objects.equals(embedCredentials, that.embedCredentials) + && Objects.equals(revisionCreateTime, 
that.revisionCreateTime) + && Objects.equals(warehouseId, that.warehouseId); + } + + @Override + public int hashCode() { + return Objects.hash(displayName, embedCredentials, revisionCreateTime, warehouseId); + } + + @Override + public String toString() { + return new ToStringer(PublishedDashboard.class) + .add("displayName", displayName) + .add("embedCredentials", embedCredentials) + .add("revisionCreateTime", revisionCreateTime) + .add("warehouseId", warehouseId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashDashboardRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashDashboardRequest.java new file mode 100755 index 000000000..b346cd139 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashDashboardRequest.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +/** Trash dashboard */ +@Generated +public class TrashDashboardRequest { + /** UUID identifying the dashboard. 
*/ + private String dashboardId; + + public TrashDashboardRequest setDashboardId(String dashboardId) { + this.dashboardId = dashboardId; + return this; + } + + public String getDashboardId() { + return dashboardId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TrashDashboardRequest that = (TrashDashboardRequest) o; + return Objects.equals(dashboardId, that.dashboardId); + } + + @Override + public int hashCode() { + return Objects.hash(dashboardId); + } + + @Override + public String toString() { + return new ToStringer(TrashDashboardRequest.class).add("dashboardId", dashboardId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PublishResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashDashboardResponse.java similarity index 83% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PublishResponse.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashDashboardResponse.java index 9f953cd06..0f43a3a49 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PublishResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashDashboardResponse.java @@ -7,7 +7,7 @@ import java.util.Objects; @Generated -public class PublishResponse { +public class TrashDashboardResponse { @Override public boolean equals(Object o) { @@ -23,6 +23,6 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(PublishResponse.class).toString(); + return new ToStringer(TrashDashboardResponse.class).toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UnpublishDashboardRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UnpublishDashboardRequest.java new file mode 
100755 index 000000000..6e18e5e72 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UnpublishDashboardRequest.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +/** Unpublish dashboard */ +@Generated +public class UnpublishDashboardRequest { + /** UUID identifying the dashboard to be published. */ + private String dashboardId; + + public UnpublishDashboardRequest setDashboardId(String dashboardId) { + this.dashboardId = dashboardId; + return this; + } + + public String getDashboardId() { + return dashboardId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UnpublishDashboardRequest that = (UnpublishDashboardRequest) o; + return Objects.equals(dashboardId, that.dashboardId); + } + + @Override + public int hashCode() { + return Objects.hash(dashboardId); + } + + @Override + public String toString() { + return new ToStringer(UnpublishDashboardRequest.class) + .add("dashboardId", dashboardId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UnpublishDashboardResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UnpublishDashboardResponse.java new file mode 100755 index 000000000..211e9c010 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UnpublishDashboardResponse.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +public class UnpublishDashboardResponse { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(UnpublishDashboardResponse.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateDashboardRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateDashboardRequest.java new file mode 100755 index 000000000..46a384eec --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UpdateDashboardRequest.java @@ -0,0 +1,106 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.dashboards; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class UpdateDashboardRequest { + /** UUID identifying the dashboard. */ + private String dashboardId; + + /** The display name of the dashboard. */ + @JsonProperty("display_name") + private String displayName; + + /** + * The etag for the dashboard. Can be optionally provided on updates to ensure that the dashboard + * has not been modified since the last read. + */ + @JsonProperty("etag") + private String etag; + + /** The contents of the dashboard in serialized string form. */ + @JsonProperty("serialized_dashboard") + private String serializedDashboard; + + /** The warehouse ID used to run the dashboard. 
*/ + @JsonProperty("warehouse_id") + private String warehouseId; + + public UpdateDashboardRequest setDashboardId(String dashboardId) { + this.dashboardId = dashboardId; + return this; + } + + public String getDashboardId() { + return dashboardId; + } + + public UpdateDashboardRequest setDisplayName(String displayName) { + this.displayName = displayName; + return this; + } + + public String getDisplayName() { + return displayName; + } + + public UpdateDashboardRequest setEtag(String etag) { + this.etag = etag; + return this; + } + + public String getEtag() { + return etag; + } + + public UpdateDashboardRequest setSerializedDashboard(String serializedDashboard) { + this.serializedDashboard = serializedDashboard; + return this; + } + + public String getSerializedDashboard() { + return serializedDashboard; + } + + public UpdateDashboardRequest setWarehouseId(String warehouseId) { + this.warehouseId = warehouseId; + return this; + } + + public String getWarehouseId() { + return warehouseId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateDashboardRequest that = (UpdateDashboardRequest) o; + return Objects.equals(dashboardId, that.dashboardId) + && Objects.equals(displayName, that.displayName) + && Objects.equals(etag, that.etag) + && Objects.equals(serializedDashboard, that.serializedDashboard) + && Objects.equals(warehouseId, that.warehouseId); + } + + @Override + public int hashCode() { + return Objects.hash(dashboardId, displayName, etag, serializedDashboard, warehouseId); + } + + @Override + public String toString() { + return new ToStringer(UpdateDashboardRequest.class) + .add("dashboardId", dashboardId) + .add("displayName", displayName) + .add("etag", etag) + .add("serializedDashboard", serializedDashboard) + .add("warehouseId", warehouseId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DbfsAPI.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DbfsAPI.java index be15a4ff9..f1909f28f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DbfsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/DbfsAPI.java @@ -3,6 +3,7 @@ import com.databricks.sdk.core.ApiClient; import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -137,7 +138,7 @@ public Iterable list(String path) { * same functionality without timing out. */ public Iterable list(ListDbfsRequest request) { - return impl.list(request).getFiles(); + return new Paginator<>(request, impl::list, ListStatusResponse::getFiles, response -> null); } public void mkdirs(String path) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesImpl.java index ad4022f8b..95adf6bef 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/FilesImpl.java @@ -2,6 +2,7 @@ package com.databricks.sdk.service.files; import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.core.http.Encoding; import com.databricks.sdk.support.Generated; import java.util.HashMap; import java.util.Map; @@ -17,28 +18,38 @@ public FilesImpl(ApiClient apiClient) { @Override public void createDirectory(CreateDirectoryRequest request) { - String path = String.format("/api/2.0/fs/directories%s", request.getDirectoryPath()); + String path = + String.format( + "/api/2.0/fs/directories%s", + Encoding.encodeMultiSegmentPathParameter(request.getDirectoryPath())); Map headers = new HashMap<>(); apiClient.PUT(path, null, CreateDirectoryResponse.class, headers); } @Override public void delete(DeleteFileRequest request) { - String path = 
String.format("/api/2.0/fs/files%s", request.getFilePath()); + String path = + String.format( + "/api/2.0/fs/files%s", Encoding.encodeMultiSegmentPathParameter(request.getFilePath())); Map headers = new HashMap<>(); apiClient.DELETE(path, request, DeleteResponse.class, headers); } @Override public void deleteDirectory(DeleteDirectoryRequest request) { - String path = String.format("/api/2.0/fs/directories%s", request.getDirectoryPath()); + String path = + String.format( + "/api/2.0/fs/directories%s", + Encoding.encodeMultiSegmentPathParameter(request.getDirectoryPath())); Map headers = new HashMap<>(); apiClient.DELETE(path, request, DeleteDirectoryResponse.class, headers); } @Override public DownloadResponse download(DownloadRequest request) { - String path = String.format("/api/2.0/fs/files%s", request.getFilePath()); + String path = + String.format( + "/api/2.0/fs/files%s", Encoding.encodeMultiSegmentPathParameter(request.getFilePath())); Map headers = new HashMap<>(); headers.put("Accept", "application/octet-stream"); return apiClient.GET(path, request, DownloadResponse.class, headers); @@ -46,21 +57,29 @@ public DownloadResponse download(DownloadRequest request) { @Override public void getDirectoryMetadata(GetDirectoryMetadataRequest request) { - String path = String.format("/api/2.0/fs/directories%s", request.getDirectoryPath()); + String path = + String.format( + "/api/2.0/fs/directories%s", + Encoding.encodeMultiSegmentPathParameter(request.getDirectoryPath())); Map headers = new HashMap<>(); apiClient.HEAD(path, request, GetDirectoryMetadataResponse.class, headers); } @Override public GetMetadataResponse getMetadata(GetMetadataRequest request) { - String path = String.format("/api/2.0/fs/files%s", request.getFilePath()); + String path = + String.format( + "/api/2.0/fs/files%s", Encoding.encodeMultiSegmentPathParameter(request.getFilePath())); Map headers = new HashMap<>(); return apiClient.HEAD(path, request, GetMetadataResponse.class, headers); } 
@Override public ListDirectoryResponse listDirectoryContents(ListDirectoryContentsRequest request) { - String path = String.format("/api/2.0/fs/directories%s", request.getDirectoryPath()); + String path = + String.format( + "/api/2.0/fs/directories%s", + Encoding.encodeMultiSegmentPathParameter(request.getDirectoryPath())); Map headers = new HashMap<>(); headers.put("Accept", "application/json"); return apiClient.GET(path, request, ListDirectoryResponse.class, headers); @@ -68,7 +87,9 @@ public ListDirectoryResponse listDirectoryContents(ListDirectoryContentsRequest @Override public void upload(UploadRequest request) { - String path = String.format("/api/2.0/fs/files%s", request.getFilePath()); + String path = + String.format( + "/api/2.0/fs/files%s", Encoding.encodeMultiSegmentPathParameter(request.getFilePath())); Map headers = new HashMap<>(); headers.put("Content-Type", "application/octet-stream"); apiClient.PUT(path, request.getContents(), UploadResponse.class, headers); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionRequest.java index 7a25723e7..dca4a1bdb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/GetPermissionRequest.java @@ -15,7 +15,7 @@ public class GetPermissionRequest { /** * The type of the request object. Can be one of the following: authorization, clusters, * cluster-policies, directories, experiments, files, instance-pools, jobs, notebooks, pipelines, - * registered-models, repos, serving-endpoints, or sql-warehouses. + * registered-models, repos, serving-endpoints, or warehouses. 
*/ private String requestObjectType; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionLevel.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionLevel.java index 18408b80b..fff729d4c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionLevel.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionLevel.java @@ -15,6 +15,7 @@ public enum PermissionLevel { CAN_MANAGE_PRODUCTION_VERSIONS, CAN_MANAGE_RUN, CAN_MANAGE_STAGING_VERSIONS, + CAN_QUERY, CAN_READ, CAN_RESTART, CAN_RUN, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionMigrationAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionMigrationAPI.java new file mode 100755 index 000000000..88bb7fa66 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionMigrationAPI.java @@ -0,0 +1,50 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * This spec contains undocumented permission migration APIs used in + * https://github.com/databrickslabs/ucx. 
+ */ +@Generated +public class PermissionMigrationAPI { + private static final Logger LOG = LoggerFactory.getLogger(PermissionMigrationAPI.class); + + private final PermissionMigrationService impl; + + /** Regular-use constructor */ + public PermissionMigrationAPI(ApiClient apiClient) { + impl = new PermissionMigrationImpl(apiClient); + } + + /** Constructor for mocks */ + public PermissionMigrationAPI(PermissionMigrationService mock) { + impl = mock; + } + + public PermissionMigrationResponse migratePermissions( + long workspaceId, String fromWorkspaceGroupName, String toAccountGroupName) { + return migratePermissions( + new PermissionMigrationRequest() + .setWorkspaceId(workspaceId) + .setFromWorkspaceGroupName(fromWorkspaceGroupName) + .setToAccountGroupName(toAccountGroupName)); + } + + /** + * Migrate Permissions. + * + *

Migrate a batch of permissions from a workspace local group to an account group. + */ + public PermissionMigrationResponse migratePermissions(PermissionMigrationRequest request) { + return impl.migratePermissions(request); + } + + public PermissionMigrationService impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionMigrationImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionMigrationImpl.java new file mode 100755 index 000000000..d4e939816 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionMigrationImpl.java @@ -0,0 +1,26 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import java.util.HashMap; +import java.util.Map; + +/** Package-local implementation of PermissionMigration */ +@Generated +class PermissionMigrationImpl implements PermissionMigrationService { + private final ApiClient apiClient; + + public PermissionMigrationImpl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public PermissionMigrationResponse migratePermissions(PermissionMigrationRequest request) { + String path = "/api/2.0/permissionmigration"; + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + headers.put("Content-Type", "application/json"); + return apiClient.POST(path, request, PermissionMigrationResponse.class, headers); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionMigrationRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionMigrationRequest.java new file mode 100755 index 000000000..3042a30bf --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionMigrationRequest.java @@ -0,0 +1,92 @@ +// Code generated 
from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class PermissionMigrationRequest { + /** The name of the workspace group that permissions will be migrated from. */ + @JsonProperty("from_workspace_group_name") + private String fromWorkspaceGroupName; + + /** The maximum number of permissions that will be migrated. */ + @JsonProperty("size") + private Long size; + + /** The name of the account group that permissions will be migrated to. */ + @JsonProperty("to_account_group_name") + private String toAccountGroupName; + + /** + * WorkspaceId of the associated workspace where the permission migration will occur. Both + * workspace group and account group must be in this workspace. + */ + @JsonProperty("workspace_id") + private Long workspaceId; + + public PermissionMigrationRequest setFromWorkspaceGroupName(String fromWorkspaceGroupName) { + this.fromWorkspaceGroupName = fromWorkspaceGroupName; + return this; + } + + public String getFromWorkspaceGroupName() { + return fromWorkspaceGroupName; + } + + public PermissionMigrationRequest setSize(Long size) { + this.size = size; + return this; + } + + public Long getSize() { + return size; + } + + public PermissionMigrationRequest setToAccountGroupName(String toAccountGroupName) { + this.toAccountGroupName = toAccountGroupName; + return this; + } + + public String getToAccountGroupName() { + return toAccountGroupName; + } + + public PermissionMigrationRequest setWorkspaceId(Long workspaceId) { + this.workspaceId = workspaceId; + return this; + } + + public Long getWorkspaceId() { + return workspaceId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PermissionMigrationRequest 
that = (PermissionMigrationRequest) o; + return Objects.equals(fromWorkspaceGroupName, that.fromWorkspaceGroupName) + && Objects.equals(size, that.size) + && Objects.equals(toAccountGroupName, that.toAccountGroupName) + && Objects.equals(workspaceId, that.workspaceId); + } + + @Override + public int hashCode() { + return Objects.hash(fromWorkspaceGroupName, size, toAccountGroupName, workspaceId); + } + + @Override + public String toString() { + return new ToStringer(PermissionMigrationRequest.class) + .add("fromWorkspaceGroupName", fromWorkspaceGroupName) + .add("size", size) + .add("toAccountGroupName", toAccountGroupName) + .add("workspaceId", workspaceId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionMigrationResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionMigrationResponse.java new file mode 100755 index 000000000..24ab24f9f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionMigrationResponse.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class PermissionMigrationResponse { + /** Number of permissions migrated. 
*/ + @JsonProperty("permissions_migrated") + private Long permissionsMigrated; + + public PermissionMigrationResponse setPermissionsMigrated(Long permissionsMigrated) { + this.permissionsMigrated = permissionsMigrated; + return this; + } + + public Long getPermissionsMigrated() { + return permissionsMigrated; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PermissionMigrationResponse that = (PermissionMigrationResponse) o; + return Objects.equals(permissionsMigrated, that.permissionsMigrated); + } + + @Override + public int hashCode() { + return Objects.hash(permissionsMigrated); + } + + @Override + public String toString() { + return new ToStringer(PermissionMigrationResponse.class) + .add("permissionsMigrated", permissionsMigrated) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionMigrationService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionMigrationService.java new file mode 100755 index 000000000..61f8956ce --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionMigrationService.java @@ -0,0 +1,23 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.iam; + +import com.databricks.sdk.support.Generated; + +/** + * This spec contains undocumented permission migration APIs used in + * https://github.com/databrickslabs/ucx. + * + *

This is the high-level interface, that contains generated methods. + * + *

Evolving: this interface is under development. Method signatures may change. + */ +@Generated +public interface PermissionMigrationService { + /** + * Migrate Permissions. + * + *

Migrate a batch of permissions from a workspace local group to an account group. + */ + PermissionMigrationResponse migratePermissions( + PermissionMigrationRequest permissionMigrationRequest); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsAPI.java index dc90904be..55eb37ad0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsAPI.java @@ -51,6 +51,9 @@ *

For the mapping of the required permissions for specific actions or abilities and other * important information, see [Access Control]. * + *

Note that to manage access control on service principals, use **[Account Access Control + * Proxy](:service:accountaccesscontrolproxy)**. + * *

[Access Control]: https://docs.databricks.com/security/auth-authz/access-control/index.html */ @Generated diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsRequest.java index f559eb51d..6643d1063 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsRequest.java @@ -20,7 +20,7 @@ public class PermissionsRequest { /** * The type of the request object. Can be one of the following: authorization, clusters, * cluster-policies, directories, experiments, files, instance-pools, jobs, notebooks, pipelines, - * registered-models, repos, serving-endpoints, or sql-warehouses. + * registered-models, repos, serving-endpoints, or warehouses. */ private String requestObjectType; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsService.java index ee7def206..b78dbb72d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PermissionsService.java @@ -48,6 +48,9 @@ *

For the mapping of the required permissions for specific actions or abilities and other * important information, see [Access Control]. * + *

Note that to manage access control on service principals, use **[Account Access Control + * Proxy](:service:accountaccesscontrolproxy)**. + * *

[Access Control]: https://docs.databricks.com/security/auth-authz/access-control/index.html * *

This is the high-level interface, that contains generated methods. diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PrincipalOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PrincipalOutput.java index ce5380f71..e3810f4d8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PrincipalOutput.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/PrincipalOutput.java @@ -13,7 +13,7 @@ public class PrincipalOutput { @JsonProperty("display_name") private String displayName; - /** The group name of the groupl. Present only if the principal is a group. */ + /** The group name of the group. Present only if the principal is a group. */ @JsonProperty("group_name") private String groupName; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateWorkspaceAssignments.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateWorkspaceAssignments.java index 3089c6e8b..8a0c4bd90 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateWorkspaceAssignments.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/UpdateWorkspaceAssignments.java @@ -10,7 +10,11 @@ @Generated public class UpdateWorkspaceAssignments { - /** Array of permissions assignments to update on the workspace. */ + /** + * Array of permissions assignments to update on the workspace. Note that excluding this field + * will have the same effect as providing an empty list which will result in the deletion of all + * permissions for the principal. 
+ */ @JsonProperty("permissions") private Collection permissions; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspaceAssignmentAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspaceAssignmentAPI.java index 7b7e56a80..c04150b74 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspaceAssignmentAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspaceAssignmentAPI.java @@ -3,6 +3,7 @@ import com.databricks.sdk.core.ApiClient; import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; import java.util.Collection; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -68,12 +69,13 @@ public Iterable list(long workspaceId) { * workspace. */ public Iterable list(ListWorkspaceAssignmentRequest request) { - return impl.list(request).getPermissionAssignments(); + return new Paginator<>( + request, impl::list, PermissionAssignments::getPermissionAssignments, response -> null); } - public void update( + public PermissionAssignment update( long workspaceId, long principalId, Collection permissions) { - update( + return update( new UpdateWorkspaceAssignments() .setWorkspaceId(workspaceId) .setPrincipalId(principalId) @@ -86,8 +88,8 @@ public void update( *

Creates or updates the workspace permissions assignment in a given account and workspace for * the specified principal. */ - public void update(UpdateWorkspaceAssignments request) { - impl.update(request); + public PermissionAssignment update(UpdateWorkspaceAssignments request) { + return impl.update(request); } public WorkspaceAssignmentService impl() { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspaceAssignmentImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspaceAssignmentImpl.java index 4372b5dd9..49ffc9ca8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspaceAssignmentImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspaceAssignmentImpl.java @@ -49,7 +49,7 @@ public PermissionAssignments list(ListWorkspaceAssignmentRequest request) { } @Override - public void update(UpdateWorkspaceAssignments request) { + public PermissionAssignment update(UpdateWorkspaceAssignments request) { String path = String.format( "/api/2.0/accounts/%s/workspaces/%s/permissionassignments/principals/%s", @@ -57,6 +57,6 @@ public void update(UpdateWorkspaceAssignments request) { Map headers = new HashMap<>(); headers.put("Accept", "application/json"); headers.put("Content-Type", "application/json"); - apiClient.PUT(path, request, WorkspaceAssignmentsUpdated.class, headers); + return apiClient.PUT(path, request, PermissionAssignment.class, headers); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspaceAssignmentService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspaceAssignmentService.java index 45bf51b24..4ec0dca6d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspaceAssignmentService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspaceAssignmentService.java @@ -42,5 +42,5 @@ public interface 
WorkspaceAssignmentService { *

Creates or updates the workspace permissions assignment in a given account and workspace for * the specified principal. */ - void update(UpdateWorkspaceAssignments updateWorkspaceAssignments); + PermissionAssignment update(UpdateWorkspaceAssignments updateWorkspaceAssignments); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseRun.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseRun.java index 6367acafb..a4f4827aa 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseRun.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/BaseRun.java @@ -47,6 +47,10 @@ public class BaseRun { @JsonProperty("creator_user_name") private String creatorUserName; + /** Description of the run */ + @JsonProperty("description") + private String description; + /** * The time at which this run ended in epoch milliseconds (milliseconds since 1/1/1970 UTC). This * field is set to 0 if the job is still running. @@ -109,6 +113,14 @@ public class BaseRun { @JsonProperty("overriding_parameters") private RunParameters overridingParameters; + /** The time in milliseconds that the run has spent in the queue. */ + @JsonProperty("queue_duration") + private Long queueDuration; + + /** The repair history of the run. */ + @JsonProperty("repair_history") + private Collection repairHistory; + /** The time in milliseconds it took the job run and all of its repairs to finish. */ @JsonProperty("run_duration") private Long runDuration; @@ -126,9 +138,9 @@ public class BaseRun { private String runPageUrl; /** - * * `JOB_RUN`: Normal job run. A run created with :method:jobs/runNow. * `WORKFLOW_RUN`: Workflow - * run. A run created with [dbutils.notebook.run]. * `SUBMIT_RUN`: Submit run. A run created with - * :method:jobs/submit. + * The type of a run. * `JOB_RUN`: Normal job run. A run created with :method:jobs/runNow. * + * `WORKFLOW_RUN`: Workflow run. A run created with [dbutils.notebook.run]. 
* `SUBMIT_RUN`: Submit + * run. A run created with :method:jobs/submit. * *

[dbutils.notebook.run]: * https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-workflow @@ -183,7 +195,7 @@ public class BaseRun { @JsonProperty("trigger") private TriggerType trigger; - /** */ + /** Additional details about what triggered the run */ @JsonProperty("trigger_info") private TriggerInfo triggerInfo; @@ -232,6 +244,15 @@ public String getCreatorUserName() { return creatorUserName; } + public BaseRun setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + public BaseRun setEndTime(Long endTime) { this.endTime = endTime; return this; @@ -313,6 +334,24 @@ public RunParameters getOverridingParameters() { return overridingParameters; } + public BaseRun setQueueDuration(Long queueDuration) { + this.queueDuration = queueDuration; + return this; + } + + public Long getQueueDuration() { + return queueDuration; + } + + public BaseRun setRepairHistory(Collection repairHistory) { + this.repairHistory = repairHistory; + return this; + } + + public Collection getRepairHistory() { + return repairHistory; + } + public BaseRun setRunDuration(Long runDuration) { this.runDuration = runDuration; return this; @@ -431,6 +470,7 @@ public boolean equals(Object o) { && Objects.equals(clusterInstance, that.clusterInstance) && Objects.equals(clusterSpec, that.clusterSpec) && Objects.equals(creatorUserName, that.creatorUserName) + && Objects.equals(description, that.description) && Objects.equals(endTime, that.endTime) && Objects.equals(executionDuration, that.executionDuration) && Objects.equals(gitSource, that.gitSource) @@ -440,6 +480,8 @@ public boolean equals(Object o) { && Objects.equals(numberInJob, that.numberInJob) && Objects.equals(originalAttemptRunId, that.originalAttemptRunId) && Objects.equals(overridingParameters, that.overridingParameters) + && Objects.equals(queueDuration, that.queueDuration) + && Objects.equals(repairHistory, that.repairHistory) && 
Objects.equals(runDuration, that.runDuration) && Objects.equals(runId, that.runId) && Objects.equals(runName, that.runName) @@ -462,6 +504,7 @@ public int hashCode() { clusterInstance, clusterSpec, creatorUserName, + description, endTime, executionDuration, gitSource, @@ -471,6 +514,8 @@ public int hashCode() { numberInJob, originalAttemptRunId, overridingParameters, + queueDuration, + repairHistory, runDuration, runId, runName, @@ -493,6 +538,7 @@ public String toString() { .add("clusterInstance", clusterInstance) .add("clusterSpec", clusterSpec) .add("creatorUserName", creatorUserName) + .add("description", description) .add("endTime", endTime) .add("executionDuration", executionDuration) .add("gitSource", gitSource) @@ -502,6 +548,8 @@ public String toString() { .add("numberInJob", numberInJob) .add("originalAttemptRunId", originalAttemptRunId) .add("overridingParameters", overridingParameters) + .add("queueDuration", queueDuration) + .add("repairHistory", repairHistory) .add("runDuration", runDuration) .add("runId", runId) .add("runName", runName) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ClusterSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ClusterSpec.java index 811e3921f..4e080a1e3 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ClusterSpec.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ClusterSpec.java @@ -11,21 +11,28 @@ @Generated public class ClusterSpec { /** - * If existing_cluster_id, the ID of an existing cluster that is used for all runs of this job. - * When running jobs on an existing cluster, you may need to manually restart the cluster if it - * stops responding. We suggest running jobs on new clusters for greater reliability + * If existing_cluster_id, the ID of an existing cluster that is used for all runs. 
When running + * jobs or tasks on an existing cluster, you may need to manually restart the cluster if it stops + * responding. We suggest running jobs and tasks on new clusters for greater reliability */ @JsonProperty("existing_cluster_id") private String existingClusterId; /** - * An optional list of libraries to be installed on the cluster that executes the job. The default - * value is an empty list. + * If job_cluster_key, this task is executed reusing the cluster specified in + * `job.settings.job_clusters`. + */ + @JsonProperty("job_cluster_key") + private String jobClusterKey; + + /** + * An optional list of libraries to be installed on the cluster. The default value is an empty + * list. */ @JsonProperty("libraries") private Collection libraries; - /** If new_cluster, a description of a cluster that is created for each run. */ + /** If new_cluster, a description of a new cluster that is created for each run. */ @JsonProperty("new_cluster") private com.databricks.sdk.service.compute.ClusterSpec newCluster; @@ -38,6 +45,15 @@ public String getExistingClusterId() { return existingClusterId; } + public ClusterSpec setJobClusterKey(String jobClusterKey) { + this.jobClusterKey = jobClusterKey; + return this; + } + + public String getJobClusterKey() { + return jobClusterKey; + } + public ClusterSpec setLibraries( Collection libraries) { this.libraries = libraries; @@ -63,19 +79,21 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; ClusterSpec that = (ClusterSpec) o; return Objects.equals(existingClusterId, that.existingClusterId) + && Objects.equals(jobClusterKey, that.jobClusterKey) && Objects.equals(libraries, that.libraries) && Objects.equals(newCluster, that.newCluster); } @Override public int hashCode() { - return Objects.hash(existingClusterId, libraries, newCluster); + return Objects.hash(existingClusterId, jobClusterKey, libraries, newCluster); } @Override public String toString() { return new 
ToStringer(ClusterSpec.class) .add("existingClusterId", existingClusterId) + .add("jobClusterKey", jobClusterKey) .add("libraries", libraries) .add("newCluster", newCluster) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java index 4a102d975..8c55da7c0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java @@ -15,10 +15,6 @@ public class CreateJob { @JsonProperty("access_control_list") private Collection accessControlList; - /** A list of compute requirements that can be referenced by tasks of this job. */ - @JsonProperty("compute") - private Collection compute; - /** * An optional continuous property for this job. The continuous property will ensure that there is * always one run executing. Only one of `schedule` and `continuous` can be used. @@ -43,7 +39,7 @@ public class CreateJob { * is in an editable state and can be modified. */ @JsonProperty("edit_mode") - private CreateJobEditMode editMode; + private JobEditMode editMode; /** * An optional set of email addresses that is notified when runs of this job begin or complete as @@ -52,6 +48,13 @@ public class CreateJob { @JsonProperty("email_notifications") private JobEmailNotifications emailNotifications; + /** + * A list of task execution environment specifications that can be referenced by tasks of this + * job. + */ + @JsonProperty("environments") + private Collection environments; + /** * Used to tell what is the format of the job. This field is ignored in Create/Update/Reset calls. * When using the Jobs API 2.1 this value is always set to `"MULTI_TASK"`. @@ -86,19 +89,14 @@ public class CreateJob { private Collection jobClusters; /** - * An optional maximum allowed number of concurrent runs of the job. - * - *

Set this value if you want to be able to execute multiple runs of the same job concurrently. - * This is useful for example if you trigger your job on a frequent schedule and want to allow - * consecutive runs to overlap with each other, or if you want to trigger multiple runs which - * differ by their input parameters. - * - *

This setting affects only new runs. For example, suppose the job’s concurrency is 4 and - * there are 4 concurrent active runs. Then setting the concurrency to 3 won’t kill any of the - * active runs. However, from then on, new runs are skipped unless there are fewer than 3 active - * runs. - * - *

This value cannot exceed 1000. Setting this value to `0` causes all new runs to be skipped. + * An optional maximum allowed number of concurrent runs of the job. Set this value if you want to + * be able to execute multiple runs of the same job concurrently. This is useful for example if + * you trigger your job on a frequent schedule and want to allow consecutive runs to overlap with + * each other, or if you want to trigger multiple runs which differ by their input parameters. + * This setting affects only new runs. For example, suppose the job’s concurrency is 4 and there + * are 4 concurrent active runs. Then setting the concurrency to 3 won’t kill any of the active + * runs. However, from then on, new runs are skipped unless there are fewer than 3 active runs. + * This value cannot exceed 1000. Setting this value to `0` causes all new runs to be skipped. */ @JsonProperty("max_concurrent_runs") private Long maxConcurrentRuns; @@ -178,15 +176,6 @@ public Collection getAccess return accessControlList; } - public CreateJob setCompute(Collection compute) { - this.compute = compute; - return this; - } - - public Collection getCompute() { - return compute; - } - public CreateJob setContinuous(Continuous continuous) { this.continuous = continuous; return this; @@ -214,12 +203,12 @@ public String getDescription() { return description; } - public CreateJob setEditMode(CreateJobEditMode editMode) { + public CreateJob setEditMode(JobEditMode editMode) { this.editMode = editMode; return this; } - public CreateJobEditMode getEditMode() { + public JobEditMode getEditMode() { return editMode; } @@ -232,6 +221,15 @@ public JobEmailNotifications getEmailNotifications() { return emailNotifications; } + public CreateJob setEnvironments(Collection environments) { + this.environments = environments; + return this; + } + + public Collection getEnvironments() { + return environments; + } + public CreateJob setFormat(Format format) { this.format = format; return this; @@ -382,12 
+380,12 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; CreateJob that = (CreateJob) o; return Objects.equals(accessControlList, that.accessControlList) - && Objects.equals(compute, that.compute) && Objects.equals(continuous, that.continuous) && Objects.equals(deployment, that.deployment) && Objects.equals(description, that.description) && Objects.equals(editMode, that.editMode) && Objects.equals(emailNotifications, that.emailNotifications) + && Objects.equals(environments, that.environments) && Objects.equals(format, that.format) && Objects.equals(gitSource, that.gitSource) && Objects.equals(health, that.health) @@ -410,12 +408,12 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash( accessControlList, - compute, continuous, deployment, description, editMode, emailNotifications, + environments, format, gitSource, health, @@ -438,12 +436,12 @@ public int hashCode() { public String toString() { return new ToStringer(CreateJob.class) .add("accessControlList", accessControlList) - .add("compute", compute) .add("continuous", continuous) .add("deployment", deployment) .add("description", description) .add("editMode", editMode) .add("emailNotifications", emailNotifications) + .add("environments", environments) .add("format", format) .add("gitSource", gitSource) .add("health", health) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateResponse.java index 80540b27b..30e422378 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateResponse.java @@ -7,6 +7,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Job was created successfully */ @Generated public class CreateResponse { /** The canonical identifier for the newly 
created job. */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CronSchedule.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CronSchedule.java index c9f871729..6cbe5d60a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CronSchedule.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CronSchedule.java @@ -15,7 +15,7 @@ public class CronSchedule { /** * A Cron expression using Quartz syntax that describes the schedule for a job. See [Cron Trigger] - * for details. This field is required." + * for details. This field is required. * *

[Cron Trigger]: * http://www.quartz-scheduler.org/documentation/quartz-2.3.0/tutorials/crontrigger.html diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtTask.java index 5bb980184..b27b556a2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtTask.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DbtTask.java @@ -49,11 +49,11 @@ public class DbtTask { /** * Optional location type of the project directory. When set to `WORKSPACE`, the project will be - * retrieved from the local workspace. When set to `GIT`, the project will be - * retrieved from a Git repository defined in `git_source`. If the value is empty, the task will - * use `GIT` if `git_source` is defined and `WORKSPACE` otherwise. + * retrieved from the local Databricks workspace. When set to `GIT`, the project will be retrieved + * from a Git repository defined in `git_source`. If the value is empty, the task will use `GIT` + * if `git_source` is defined and `WORKSPACE` otherwise. * - *

* `WORKSPACE`: Project is located in workspace. * `GIT`: Project is located in + *

* `WORKSPACE`: Project is located in Databricks workspace. * `GIT`: Project is located in * cloud Git provider. */ @JsonProperty("source") diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteRun.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteRun.java index af5bfd438..263bd5cc8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteRun.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/DeleteRun.java @@ -9,7 +9,7 @@ @Generated public class DeleteRun { - /** The canonical identifier of the run for which to retrieve the metadata. */ + /** ID of the run to delete. */ @JsonProperty("run_id") private Long runId; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ExportRunOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ExportRunOutput.java index dae77a0f9..09a4c43b7 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ExportRunOutput.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ExportRunOutput.java @@ -8,6 +8,7 @@ import java.util.Collection; import java.util.Objects; +/** Run was exported successfully. */ @Generated public class ExportRunOutput { /** diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/FileArrivalTriggerConfiguration.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/FileArrivalTriggerConfiguration.java index 42e8a4262..bbebd02f0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/FileArrivalTriggerConfiguration.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/FileArrivalTriggerConfiguration.java @@ -17,8 +17,8 @@ public class FileArrivalTriggerConfiguration { private Long minTimeBetweenTriggersSeconds; /** - * The storage location to monitor for file arrivals. 
The value must point to the root or a - * subpath of an external location URL or the root or subpath of a Unity Catalog volume. + * URL to be monitored for file arrivals. The path must point to the root or a subpath of the + * external location. */ @JsonProperty("url") private String url; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ForEachStats.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ForEachStats.java index 44e177bfa..62611e459 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ForEachStats.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ForEachStats.java @@ -5,24 +5,26 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; import java.util.Objects; @Generated public class ForEachStats { /** Sample of 3 most common error messages occurred during the iteration. */ @JsonProperty("error_message_stats") - private ForEachTaskErrorMessageStats errorMessageStats; + private Collection errorMessageStats; /** Describes stats of the iteration. Only latest retries are considered. 
*/ @JsonProperty("task_run_stats") private ForEachTaskTaskRunStats taskRunStats; - public ForEachStats setErrorMessageStats(ForEachTaskErrorMessageStats errorMessageStats) { + public ForEachStats setErrorMessageStats( + Collection errorMessageStats) { this.errorMessageStats = errorMessageStats; return this; } - public ForEachTaskErrorMessageStats getErrorMessageStats() { + public Collection getErrorMessageStats() { return errorMessageStats; } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ForEachTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ForEachTask.java index 40d354388..517e20236 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ForEachTask.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ForEachTask.java @@ -19,7 +19,7 @@ public class ForEachTask { @JsonProperty("inputs") private String inputs; - /** */ + /** Configuration for the task that will be run for each element in the array */ @JsonProperty("task") private Task task; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ForEachTaskErrorMessageStats.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ForEachTaskErrorMessageStats.java index a51fabf96..f4386486e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ForEachTaskErrorMessageStats.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ForEachTaskErrorMessageStats.java @@ -11,18 +11,18 @@ public class ForEachTaskErrorMessageStats { /** Describes the count of such error message encountered during the iterations. */ @JsonProperty("count") - private String count; + private Long count; /** Describes the error message occured during the iterations. 
*/ @JsonProperty("error_message") private String errorMessage; - public ForEachTaskErrorMessageStats setCount(String count) { + public ForEachTaskErrorMessageStats setCount(Long count) { this.count = count; return this; } - public String getCount() { + public Long getCount() { return count; } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetRunOutputRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetRunOutputRequest.java index 50e718384..983b29488 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetRunOutputRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/GetRunOutputRequest.java @@ -10,7 +10,7 @@ /** Get the output for a single run */ @Generated public class GetRunOutputRequest { - /** The canonical identifier for the run. This field is required. */ + /** The canonical identifier for the run. */ @QueryParam("run_id") private Long runId; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Job.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Job.java index a3e77ae05..bbee1c6fa 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Job.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Job.java @@ -7,6 +7,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Job was retrieved successfully. */ @Generated public class Job { /** diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobCompute.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobCompute.java deleted file mode 100755 index 7a738f26a..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobCompute.java +++ /dev/null @@ -1,62 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.jobs; - -import com.databricks.sdk.support.Generated; -import com.databricks.sdk.support.ToStringer; -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.Objects; - -@Generated -public class JobCompute { - /** - * A unique name for the compute requirement. This field is required and must be unique within the - * job. `JobTaskSettings` may refer to this field to determine the compute requirements for the - * task execution. - */ - @JsonProperty("compute_key") - private String computeKey; - - /** */ - @JsonProperty("spec") - private com.databricks.sdk.service.compute.ComputeSpec spec; - - public JobCompute setComputeKey(String computeKey) { - this.computeKey = computeKey; - return this; - } - - public String getComputeKey() { - return computeKey; - } - - public JobCompute setSpec(com.databricks.sdk.service.compute.ComputeSpec spec) { - this.spec = spec; - return this; - } - - public com.databricks.sdk.service.compute.ComputeSpec getSpec() { - return spec; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - JobCompute that = (JobCompute) o; - return Objects.equals(computeKey, that.computeKey) && Objects.equals(spec, that.spec); - } - - @Override - public int hashCode() { - return Objects.hash(computeKey, spec); - } - - @Override - public String toString() { - return new ToStringer(JobCompute.class) - .add("computeKey", computeKey) - .add("spec", spec) - .toString(); - } -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobDeploymentKind.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobDeploymentKind.java index 52683b09a..16aa8c0ca 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobDeploymentKind.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobDeploymentKind.java @@ -4,11 +4,7 @@ import 
com.databricks.sdk.support.Generated; -/** - * The kind of deployment that manages the job. - * - *

* `BUNDLE`: The job is managed by Databricks Asset Bundle. - */ +/** * `BUNDLE`: The job is managed by Databricks Asset Bundle. */ @Generated public enum JobDeploymentKind { BUNDLE, // The job is managed by Databricks Asset Bundle. diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJobEditMode.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobEditMode.java similarity index 94% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJobEditMode.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobEditMode.java index c679a5e00..624c86967 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJobEditMode.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobEditMode.java @@ -11,7 +11,7 @@ * is in an editable state and can be modified. */ @Generated -public enum CreateJobEditMode { +public enum JobEditMode { EDITABLE, // The job is in an editable state and can be modified. UI_LOCKED, // The job is in a locked UI state and cannot be modified. } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobEnvironment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobEnvironment.java new file mode 100755 index 000000000..9fb6ddbdd --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobEnvironment.java @@ -0,0 +1,62 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.jobs; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class JobEnvironment { + /** The key of an environment. It has to be unique within a job. 
*/ + @JsonProperty("environment_key") + private String environmentKey; + + /** + * The a environment entity used to preserve serverless environment side panel and jobs' + * environment for non-notebook task. In this minimal environment spec, only pip dependencies are + * supported. Next ID: 5 + */ + @JsonProperty("spec") + private com.databricks.sdk.service.compute.Environment spec; + + public JobEnvironment setEnvironmentKey(String environmentKey) { + this.environmentKey = environmentKey; + return this; + } + + public String getEnvironmentKey() { + return environmentKey; + } + + public JobEnvironment setSpec(com.databricks.sdk.service.compute.Environment spec) { + this.spec = spec; + return this; + } + + public com.databricks.sdk.service.compute.Environment getSpec() { + return spec; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + JobEnvironment that = (JobEnvironment) o; + return Objects.equals(environmentKey, that.environmentKey) && Objects.equals(spec, that.spec); + } + + @Override + public int hashCode() { + return Objects.hash(environmentKey, spec); + } + + @Override + public String toString() { + return new ToStringer(JobEnvironment.class) + .add("environmentKey", environmentKey) + .add("spec", spec) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java index c82f275ec..b604e002a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java @@ -11,10 +11,6 @@ @Generated public class JobSettings { - /** A list of compute requirements that can be referenced by tasks of this job. */ - @JsonProperty("compute") - private Collection compute; - /** * An optional continuous property for this job. 
The continuous property will ensure that there is * always one run executing. Only one of `schedule` and `continuous` can be used. @@ -39,7 +35,7 @@ public class JobSettings { * is in an editable state and can be modified. */ @JsonProperty("edit_mode") - private JobSettingsEditMode editMode; + private JobEditMode editMode; /** * An optional set of email addresses that is notified when runs of this job begin or complete as @@ -48,6 +44,13 @@ public class JobSettings { @JsonProperty("email_notifications") private JobEmailNotifications emailNotifications; + /** + * A list of task execution environment specifications that can be referenced by tasks of this + * job. + */ + @JsonProperty("environments") + private Collection environments; + /** * Used to tell what is the format of the job. This field is ignored in Create/Update/Reset calls. * When using the Jobs API 2.1 this value is always set to `"MULTI_TASK"`. @@ -82,19 +85,14 @@ public class JobSettings { private Collection jobClusters; /** - * An optional maximum allowed number of concurrent runs of the job. - * - *

Set this value if you want to be able to execute multiple runs of the same job concurrently. - * This is useful for example if you trigger your job on a frequent schedule and want to allow - * consecutive runs to overlap with each other, or if you want to trigger multiple runs which - * differ by their input parameters. - * - *

This setting affects only new runs. For example, suppose the job’s concurrency is 4 and - * there are 4 concurrent active runs. Then setting the concurrency to 3 won’t kill any of the - * active runs. However, from then on, new runs are skipped unless there are fewer than 3 active - * runs. - * - *

This value cannot exceed 1000. Setting this value to `0` causes all new runs to be skipped. + * An optional maximum allowed number of concurrent runs of the job. Set this value if you want to + * be able to execute multiple runs of the same job concurrently. This is useful for example if + * you trigger your job on a frequent schedule and want to allow consecutive runs to overlap with + * each other, or if you want to trigger multiple runs which differ by their input parameters. + * This setting affects only new runs. For example, suppose the job’s concurrency is 4 and there + * are 4 concurrent active runs. Then setting the concurrency to 3 won’t kill any of the active + * runs. However, from then on, new runs are skipped unless there are fewer than 3 active runs. + * This value cannot exceed 1000. Setting this value to `0` causes all new runs to be skipped. */ @JsonProperty("max_concurrent_runs") private Long maxConcurrentRuns; @@ -164,15 +162,6 @@ public class JobSettings { @JsonProperty("webhook_notifications") private WebhookNotifications webhookNotifications; - public JobSettings setCompute(Collection compute) { - this.compute = compute; - return this; - } - - public Collection getCompute() { - return compute; - } - public JobSettings setContinuous(Continuous continuous) { this.continuous = continuous; return this; @@ -200,12 +189,12 @@ public String getDescription() { return description; } - public JobSettings setEditMode(JobSettingsEditMode editMode) { + public JobSettings setEditMode(JobEditMode editMode) { this.editMode = editMode; return this; } - public JobSettingsEditMode getEditMode() { + public JobEditMode getEditMode() { return editMode; } @@ -218,6 +207,15 @@ public JobEmailNotifications getEmailNotifications() { return emailNotifications; } + public JobSettings setEnvironments(Collection environments) { + this.environments = environments; + return this; + } + + public Collection getEnvironments() { + return environments; + } + public JobSettings 
setFormat(Format format) { this.format = format; return this; @@ -367,12 +365,12 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; JobSettings that = (JobSettings) o; - return Objects.equals(compute, that.compute) - && Objects.equals(continuous, that.continuous) + return Objects.equals(continuous, that.continuous) && Objects.equals(deployment, that.deployment) && Objects.equals(description, that.description) && Objects.equals(editMode, that.editMode) && Objects.equals(emailNotifications, that.emailNotifications) + && Objects.equals(environments, that.environments) && Objects.equals(format, that.format) && Objects.equals(gitSource, that.gitSource) && Objects.equals(health, that.health) @@ -394,12 +392,12 @@ public boolean equals(Object o) { @Override public int hashCode() { return Objects.hash( - compute, continuous, deployment, description, editMode, emailNotifications, + environments, format, gitSource, health, @@ -421,12 +419,12 @@ public int hashCode() { @Override public String toString() { return new ToStringer(JobSettings.class) - .add("compute", compute) .add("continuous", continuous) .add("deployment", deployment) .add("description", description) .add("editMode", editMode) .add("emailNotifications", emailNotifications) + .add("environments", environments) .add("format", format) .add("gitSource", gitSource) .add("health", health) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettingsEditMode.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettingsEditMode.java deleted file mode 100755 index ff01d4574..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettingsEditMode.java +++ /dev/null @@ -1,17 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
- -package com.databricks.sdk.service.jobs; - -import com.databricks.sdk.support.Generated; - -/** - * Edit mode of the job. - * - *

* `UI_LOCKED`: The job is in a locked UI state and cannot be modified. * `EDITABLE`: The job - * is in an editable state and can be modified. - */ -@Generated -public enum JobSettingsEditMode { - EDITABLE, // The job is in an editable state and can be modified. - UI_LOCKED, // The job is in a locked UI state and cannot be modified. -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListJobsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListJobsRequest.java index fbbba3adc..0e1d24780 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListJobsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListJobsRequest.java @@ -26,9 +26,8 @@ public class ListJobsRequest { private String name; /** - * The offset of the first job to return, relative to the most recently created job. - * - *

Deprecated since June 2023. Use `page_token` to iterate through the pages instead. + * The offset of the first job to return, relative to the most recently created job. Deprecated + * since June 2023. Use `page_token` to iterate through the pages instead. */ @QueryParam("offset") private Long offset; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListJobsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListJobsResponse.java index f04915e9e..b45e2ca24 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListJobsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListJobsResponse.java @@ -8,21 +8,22 @@ import java.util.Collection; import java.util.Objects; +/** List of jobs was retrieved successfully. */ @Generated public class ListJobsResponse { /** If true, additional jobs matching the provided filter are available for listing. */ @JsonProperty("has_more") private Boolean hasMore; - /** The list of jobs. */ + /** The list of jobs. Only included in the response if there are jobs to list. */ @JsonProperty("jobs") private Collection jobs; - /** A token that can be used to list the next page of jobs. */ + /** A token that can be used to list the next page of jobs (if applicable). */ @JsonProperty("next_page_token") private String nextPageToken; - /** A token that can be used to list the previous page of jobs. */ + /** A token that can be used to list the previous page of jobs (if applicable). 
*/ @JsonProperty("prev_page_token") private String prevPageToken; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListRunsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListRunsRequest.java index d5db3c5e4..8a32835c4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListRunsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListRunsRequest.java @@ -41,9 +41,8 @@ public class ListRunsRequest { private Long limit; /** - * The offset of the first run to return, relative to the most recent run. - * - *

Deprecated since June 2023. Use `page_token` to iterate through the pages instead. + * The offset of the first run to return, relative to the most recent run. Deprecated since June + * 2023. Use `page_token` to iterate through the pages instead. */ @QueryParam("offset") private Long offset; @@ -57,7 +56,7 @@ public class ListRunsRequest { /** The type of runs to return. For a description of run types, see :method:jobs/getRun. */ @QueryParam("run_type") - private ListRunsRunType runType; + private RunType runType; /** * Show runs that started _at or after_ this value. The value must be a UTC timestamp in @@ -136,12 +135,12 @@ public String getPageToken() { return pageToken; } - public ListRunsRequest setRunType(ListRunsRunType runType) { + public ListRunsRequest setRunType(RunType runType) { this.runType = runType; return this; } - public ListRunsRunType getRunType() { + public RunType getRunType() { return runType; } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListRunsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListRunsResponse.java index 5d824001c..8e6ecea17 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListRunsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListRunsResponse.java @@ -8,21 +8,25 @@ import java.util.Collection; import java.util.Objects; +/** List of runs was retrieved successfully. */ @Generated public class ListRunsResponse { /** If true, additional runs matching the provided filter are available for listing. */ @JsonProperty("has_more") private Boolean hasMore; - /** A token that can be used to list the next page of runs. */ + /** A token that can be used to list the next page of runs (if applicable). */ @JsonProperty("next_page_token") private String nextPageToken; - /** A token that can be used to list the previous page of runs. 
*/ + /** A token that can be used to list the previous page of runs (if applicable). */ @JsonProperty("prev_page_token") private String prevPageToken; - /** A list of runs, from most recently started to least. */ + /** + * A list of runs, from most recently started to least. Only included in the response if there are + * runs to list. + */ @JsonProperty("runs") private Collection runs; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListRunsRunType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListRunsRunType.java deleted file mode 100755 index 026536911..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListRunsRunType.java +++ /dev/null @@ -1,22 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.jobs; - -import com.databricks.sdk.support.Generated; - -/** - * * `JOB_RUN`: Normal job run. A run created with :method:jobs/runNow. * `WORKFLOW_RUN`: Workflow - * run. A run created with [dbutils.notebook.run]. * `SUBMIT_RUN`: Submit run. A run created with - * :method:jobs/submit. - * - *

[dbutils.notebook.run]: - * https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-workflow - */ -@Generated -public enum ListRunsRunType { - JOB_RUN, // Normal job run. A run created with :method:jobs/runNow. - SUBMIT_RUN, // Submit run. A run created with :method:jobs/submit. - WORKFLOW_RUN, // Workflow run. A run created with - // [dbutils.notebook.run](/dev-tools/databricks-utils.html#dbutils-workflow). - -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/NotebookTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/NotebookTask.java index ded5f3c87..4251a2493 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/NotebookTask.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/NotebookTask.java @@ -12,10 +12,9 @@ public class NotebookTask { /** * Base parameters to be used for each run of this job. If the run is initiated by a call to - * :method:jobs/runNow with parameters specified, the two parameters maps are merged. If the same - * key is specified in `base_parameters` and in `run-now`, the value from `run-now` is used. - * - *

Use [task parameter variables] such as `{{job.id}}` to pass context about job runs. + * :method:jobs/run Now with parameters specified, the two parameters maps are merged. If the same + * key is specified in `base_parameters` and in `run-now`, the value from `run-now` is used. Use + * [Task parameter variables] to set parameters containing information about job runs. * *

If the notebook takes a parameter that is not specified in the job’s `base_parameters` or * the `run-now` override parameters, the default value from the notebook is used. @@ -24,9 +23,9 @@ public class NotebookTask { * *

The JSON representation of this field cannot exceed 1MB. * - *

[dbutils.widgets.get]: - * https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-widgets [task parameter - * variables]: https://docs.databricks.com/workflows/jobs/parameter-value-references.html + *

[Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables + * [dbutils.widgets.get]: + * https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-widgets */ @JsonProperty("base_parameters") private Map baseParameters; @@ -41,16 +40,24 @@ public class NotebookTask { /** * Optional location type of the notebook. When set to `WORKSPACE`, the notebook will be retrieved - * from the local workspace. When set to `GIT`, the notebook will be retrieved from a + * from the local Databricks workspace. When set to `GIT`, the notebook will be retrieved from a * Git repository defined in `git_source`. If the value is empty, the task will use `GIT` if - * `git_source` is defined and `WORKSPACE` otherwise. - * - *

* `WORKSPACE`: Notebook is located in workspace. * `GIT`: Notebook is located - * in cloud Git provider. + * `git_source` is defined and `WORKSPACE` otherwise. * `WORKSPACE`: Notebook is located in + * Databricks workspace. * `GIT`: Notebook is located in cloud Git provider. */ @JsonProperty("source") private Source source; + /** + * Optional `warehouse_id` to run the notebook on a SQL warehouse. Classic SQL warehouses are NOT + * supported, please use serverless or pro SQL warehouses. + * + *

Note that SQL warehouses only support SQL cells; if the notebook contains non-SQL cells, the + * run will fail. + */ + @JsonProperty("warehouse_id") + private String warehouseId; + public NotebookTask setBaseParameters(Map baseParameters) { this.baseParameters = baseParameters; return this; @@ -78,6 +85,15 @@ public Source getSource() { return source; } + public NotebookTask setWarehouseId(String warehouseId) { + this.warehouseId = warehouseId; + return this; + } + + public String getWarehouseId() { + return warehouseId; + } + @Override public boolean equals(Object o) { if (this == o) return true; @@ -85,12 +101,13 @@ public boolean equals(Object o) { NotebookTask that = (NotebookTask) o; return Objects.equals(baseParameters, that.baseParameters) && Objects.equals(notebookPath, that.notebookPath) - && Objects.equals(source, that.source); + && Objects.equals(source, that.source) + && Objects.equals(warehouseId, that.warehouseId); } @Override public int hashCode() { - return Objects.hash(baseParameters, notebookPath, source); + return Objects.hash(baseParameters, notebookPath, source, warehouseId); } @Override @@ -99,6 +116,7 @@ public String toString() { .add("baseParameters", baseParameters) .add("notebookPath", notebookPath) .add("source", source) + .add("warehouseId", warehouseId) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PipelineTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PipelineTask.java index 4de0c7658..d7b279dd6 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PipelineTask.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/PipelineTask.java @@ -9,7 +9,7 @@ @Generated public class PipelineTask { - /** If true, a full refresh will be triggered on the delta live table. */ + /** If true, triggers a full refresh on the delta live table. 
*/ @JsonProperty("full_refresh") private Boolean fullRefresh; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairRun.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairRun.java index 2707c5082..6db8bf979 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairRun.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairRun.java @@ -13,7 +13,7 @@ public class RepairRun { /** * An array of commands to execute for jobs with the dbt task, for example `"dbt_commands": ["dbt - * deps", "dbt seed", "dbt run"]` + * deps", "dbt seed", "dbt deps", "dbt seed", "dbt run"]` */ @JsonProperty("dbt_commands") private Collection dbtCommands; @@ -25,10 +25,8 @@ public class RepairRun { * cannot be specified in conjunction with notebook_params. The JSON representation of this field * (for example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes. * - *

Use [task parameter variables] such as `{{job.id}}` to pass context about job runs. - * - *

[task parameter variables]: - * https://docs.databricks.com/workflows/jobs/parameter-value-references.html + *

Use [Task parameter variables](/jobs.html\"#parameter-variables\") to set parameters + * containing information about job runs. */ @JsonProperty("jar_params") private Collection jarParams; @@ -53,14 +51,13 @@ public class RepairRun { * *

notebook_params cannot be specified in conjunction with jar_params. * - *

Use [task parameter variables] such as `{{job.id}}` to pass context about job runs. + *

Use [Task parameter variables] to set parameters containing information about job runs. * *

The JSON representation of this field (for example `{"notebook_params":{"name":"john * doe","age":"35"}}`) cannot exceed 10,000 bytes. * - *

[dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html [task - * parameter variables]: - * https://docs.databricks.com/workflows/jobs/parameter-value-references.html + *

[Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables + * [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html */ @JsonProperty("notebook_params") private Map notebookParams; @@ -82,7 +79,7 @@ public class RepairRun { * `run-now`, it would overwrite the parameters specified in job setting. The JSON representation * of this field (for example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes. * - *

Use [task parameter variables] such as `{{job.id}}` to pass context about job runs. + *

Use [Task parameter variables] to set parameters containing information about job runs. * *

Important * @@ -90,8 +87,7 @@ public class RepairRun { * characters returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese * kanjis, and emojis. * - *

[task parameter variables]: - * https://docs.databricks.com/workflows/jobs/parameter-value-references.html + *

[Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables */ @JsonProperty("python_params") private Collection pythonParams; @@ -125,7 +121,7 @@ public class RepairRun { * parameters specified in job setting. The JSON representation of this field (for example * `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes. * - *

Use [task parameter variables] such as `{{job.id}}` to pass context about job runs. + *

Use [Task parameter variables] to set parameters containing information about job runs * *

Important * @@ -133,8 +129,7 @@ public class RepairRun { * characters returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese * kanjis, and emojis. * - *

[task parameter variables]: - * https://docs.databricks.com/workflows/jobs/parameter-value-references.html + *

[Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables */ @JsonProperty("spark_submit_params") private Collection sparkSubmitParams; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairRunResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairRunResponse.java index afb46a3c0..abfe23597 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairRunResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RepairRunResponse.java @@ -7,6 +7,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Run repair was initiated. */ @Generated public class RepairRunResponse { /** diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ResolvedRunJobTaskValues.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ResolvedRunJobTaskValues.java index 9d49b1886..508176877 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ResolvedRunJobTaskValues.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ResolvedRunJobTaskValues.java @@ -11,20 +11,20 @@ @Generated public class ResolvedRunJobTaskValues { /** */ - @JsonProperty("named_parameters") - private Map namedParameters; + @JsonProperty("job_parameters") + private Map jobParameters; /** */ @JsonProperty("parameters") private Map parameters; - public ResolvedRunJobTaskValues setNamedParameters(Map namedParameters) { - this.namedParameters = namedParameters; + public ResolvedRunJobTaskValues setJobParameters(Map jobParameters) { + this.jobParameters = jobParameters; return this; } - public Map getNamedParameters() { - return namedParameters; + public Map getJobParameters() { + return jobParameters; } public ResolvedRunJobTaskValues setParameters(Map parameters) { @@ -41,19 +41,19 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || 
getClass() != o.getClass()) return false; ResolvedRunJobTaskValues that = (ResolvedRunJobTaskValues) o; - return Objects.equals(namedParameters, that.namedParameters) + return Objects.equals(jobParameters, that.jobParameters) && Objects.equals(parameters, that.parameters); } @Override public int hashCode() { - return Objects.hash(namedParameters, parameters); + return Objects.hash(jobParameters, parameters); } @Override public String toString() { return new ToStringer(ResolvedRunJobTaskValues.class) - .add("namedParameters", namedParameters) + .add("jobParameters", jobParameters) .add("parameters", parameters) .toString(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Run.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Run.java index d25a1cecd..57dc74e7f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Run.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Run.java @@ -47,6 +47,10 @@ public class Run { @JsonProperty("creator_user_name") private String creatorUserName; + /** Description of the run */ + @JsonProperty("description") + private String description; + /** * The time at which this run ended in epoch milliseconds (milliseconds since 1/1/1970 UTC). This * field is set to 0 if the job is still running. @@ -109,6 +113,10 @@ public class Run { @JsonProperty("overriding_parameters") private RunParameters overridingParameters; + /** The time in milliseconds that the run has spent in the queue. */ + @JsonProperty("queue_duration") + private Long queueDuration; + /** The repair history of the run. */ @JsonProperty("repair_history") private Collection repairHistory; @@ -130,9 +138,9 @@ public class Run { private String runPageUrl; /** - * * `JOB_RUN`: Normal job run. A run created with :method:jobs/runNow. * `WORKFLOW_RUN`: Workflow - * run. A run created with [dbutils.notebook.run]. * `SUBMIT_RUN`: Submit run. 
A run created with - * :method:jobs/submit. + * The type of a run. * `JOB_RUN`: Normal job run. A run created with :method:jobs/runNow. * + * `WORKFLOW_RUN`: Workflow run. A run created with [dbutils.notebook.run]. * `SUBMIT_RUN`: Submit + * run. A run created with :method:jobs/submit. * *

[dbutils.notebook.run]: * https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-workflow @@ -187,7 +195,7 @@ public class Run { @JsonProperty("trigger") private TriggerType trigger; - /** */ + /** Additional details about what triggered the run */ @JsonProperty("trigger_info") private TriggerInfo triggerInfo; @@ -236,6 +244,15 @@ public String getCreatorUserName() { return creatorUserName; } + public Run setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + public Run setEndTime(Long endTime) { this.endTime = endTime; return this; @@ -317,6 +334,15 @@ public RunParameters getOverridingParameters() { return overridingParameters; } + public Run setQueueDuration(Long queueDuration) { + this.queueDuration = queueDuration; + return this; + } + + public Long getQueueDuration() { + return queueDuration; + } + public Run setRepairHistory(Collection repairHistory) { this.repairHistory = repairHistory; return this; @@ -444,6 +470,7 @@ public boolean equals(Object o) { && Objects.equals(clusterInstance, that.clusterInstance) && Objects.equals(clusterSpec, that.clusterSpec) && Objects.equals(creatorUserName, that.creatorUserName) + && Objects.equals(description, that.description) && Objects.equals(endTime, that.endTime) && Objects.equals(executionDuration, that.executionDuration) && Objects.equals(gitSource, that.gitSource) @@ -453,6 +480,7 @@ public boolean equals(Object o) { && Objects.equals(numberInJob, that.numberInJob) && Objects.equals(originalAttemptRunId, that.originalAttemptRunId) && Objects.equals(overridingParameters, that.overridingParameters) + && Objects.equals(queueDuration, that.queueDuration) && Objects.equals(repairHistory, that.repairHistory) && Objects.equals(runDuration, that.runDuration) && Objects.equals(runId, that.runId) @@ -476,6 +504,7 @@ public int hashCode() { clusterInstance, clusterSpec, creatorUserName, + description, endTime, 
executionDuration, gitSource, @@ -485,6 +514,7 @@ public int hashCode() { numberInJob, originalAttemptRunId, overridingParameters, + queueDuration, repairHistory, runDuration, runId, @@ -508,6 +538,7 @@ public String toString() { .add("clusterInstance", clusterInstance) .add("clusterSpec", clusterSpec) .add("creatorUserName", creatorUserName) + .add("description", description) .add("endTime", endTime) .add("executionDuration", executionDuration) .add("gitSource", gitSource) @@ -517,6 +548,7 @@ public String toString() { .add("numberInJob", numberInJob) .add("originalAttemptRunId", originalAttemptRunId) .add("overridingParameters", overridingParameters) + .add("queueDuration", queueDuration) .add("repairHistory", repairHistory) .add("runDuration", runDuration) .add("runId", runId) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunConditionTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunConditionTask.java index 4a8883c4d..8a48eb92d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunConditionTask.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunConditionTask.java @@ -9,13 +9,25 @@ @Generated public class RunConditionTask { - /** The left operand of the condition task. */ + /** + * The left operand of the condition task. Can be either a string value or a job state or + * parameter reference. + */ @JsonProperty("left") private String left; - /** The condtion task operator. */ + /** + * * `EQUAL_TO`, `NOT_EQUAL` operators perform string comparison of their operands. This means + * that `“12.0” == “12”` will evaluate to `false`. * `GREATER_THAN`, `GREATER_THAN_OR_EQUAL`, + * `LESS_THAN`, `LESS_THAN_OR_EQUAL` operators perform numeric comparison of their operands. + * `“12.0” >= “12”` will evaluate to `true`, `“10.0” >= “12”` will evaluate to `false`. + * + *

The boolean comparison to task values can be implemented with operators `EQUAL_TO`, + * `NOT_EQUAL`. If a task value was set to a boolean value, it will be serialized to `“true”` or + * `“false”` for the comparison. + */ @JsonProperty("op") - private RunConditionTaskOp op; + private ConditionTaskOp op; /** * The condition expression evaluation result. Filled in if the task was successfully completed. @@ -24,7 +36,10 @@ public class RunConditionTask { @JsonProperty("outcome") private String outcome; - /** The right operand of the condition task. */ + /** + * The right operand of the condition task. Can be either a string value or a job state or + * parameter reference. + */ @JsonProperty("right") private String right; @@ -37,12 +52,12 @@ public String getLeft() { return left; } - public RunConditionTask setOp(RunConditionTaskOp op) { + public RunConditionTask setOp(ConditionTaskOp op) { this.op = op; return this; } - public RunConditionTaskOp getOp() { + public ConditionTaskOp getOp() { return op; } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunConditionTaskOp.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunConditionTaskOp.java deleted file mode 100755 index 7b0ac03c0..000000000 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunConditionTaskOp.java +++ /dev/null @@ -1,16 +0,0 @@ -// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. - -package com.databricks.sdk.service.jobs; - -import com.databricks.sdk.support.Generated; - -/** The condtion task operator. 
*/ -@Generated -public enum RunConditionTaskOp { - EQUAL_TO, - GREATER_THAN, - GREATER_THAN_OR_EQUAL, - LESS_THAN, - LESS_THAN_OR_EQUAL, - NOT_EQUAL, -} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunForEachTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunForEachTask.java index 754185da0..68a6b8c14 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunForEachTask.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunForEachTask.java @@ -19,11 +19,14 @@ public class RunForEachTask { @JsonProperty("inputs") private String inputs; - /** */ + /** + * Read only field. Populated for GetRun and ListRuns RPC calls and stores the execution stats of + * an For each task + */ @JsonProperty("stats") private ForEachStats stats; - /** */ + /** Configuration for the task that will be run for each element in the array */ @JsonProperty("task") private Task task; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunJobTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunJobTask.java index 072e584da..f31ac3430 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunJobTask.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunJobTask.java @@ -5,11 +5,32 @@ import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; import java.util.Map; import java.util.Objects; @Generated public class RunJobTask { + /** + * An array of commands to execute for jobs with the dbt task, for example `"dbt_commands": ["dbt + * deps", "dbt seed", "dbt deps", "dbt seed", "dbt run"]` + */ + @JsonProperty("dbt_commands") + private Collection dbtCommands; + + /** + * A list of parameters for jobs with Spark JAR tasks, for example `"jar_params": ["john doe", + * "35"]`. 
The parameters are used to invoke the main function of the main class specified in the + * Spark JAR task. If not specified upon `run-now`, it defaults to an empty list. jar_params + * cannot be specified in conjunction with notebook_params. The JSON representation of this field + * (for example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes. + * + *

Use [Task parameter variables](/jobs.html\"#parameter-variables\") to set parameters + * containing information about job runs. + */ + @JsonProperty("jar_params") + private Collection jarParams; + /** ID of the job to trigger. */ @JsonProperty("job_id") private Long jobId; @@ -18,6 +39,101 @@ public class RunJobTask { @JsonProperty("job_parameters") private Map jobParameters; + /** + * A map from keys to values for jobs with notebook task, for example `"notebook_params": {"name": + * "john doe", "age": "35"}`. The map is passed to the notebook and is accessible through the + * [dbutils.widgets.get] function. + * + *

If not specified upon `run-now`, the triggered run uses the job’s base parameters. + * + *

notebook_params cannot be specified in conjunction with jar_params. + * + *

Use [Task parameter variables] to set parameters containing information about job runs. + * + *

The JSON representation of this field (for example `{"notebook_params":{"name":"john + * doe","age":"35"}}`) cannot exceed 10,000 bytes. + * + *

[Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables + * [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html + */ + @JsonProperty("notebook_params") + private Map notebookParams; + + /** */ + @JsonProperty("pipeline_params") + private PipelineParams pipelineParams; + + /** + * A map from keys to values for jobs with Python wheel task, for example `"python_named_params": + * {"name": "task", "data": "dbfs:/path/to/data.json"}`. + */ + @JsonProperty("python_named_params") + private Map pythonNamedParams; + + /** + * A list of parameters for jobs with Python tasks, for example `"python_params": ["john doe", + * "35"]`. The parameters are passed to Python file as command-line parameters. If specified upon + * `run-now`, it would overwrite the parameters specified in job setting. The JSON representation + * of this field (for example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes. + * + *

Use [Task parameter variables] to set parameters containing information about job runs. + * + *

Important + * + *

These parameters accept only Latin characters (ASCII character set). Using non-ASCII + * characters returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese + * kanjis, and emojis. + * + *

[Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables + */ + @JsonProperty("python_params") + private Collection pythonParams; + + /** + * A list of parameters for jobs with spark submit task, for example `"spark_submit_params": + * ["--class", "org.apache.spark.examples.SparkPi"]`. The parameters are passed to spark-submit + * script as command-line parameters. If specified upon `run-now`, it would overwrite the + * parameters specified in job setting. The JSON representation of this field (for example + * `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes. + * + *

Use [Task parameter variables] to set parameters containing information about job runs + * + *

Important + * + *

These parameters accept only Latin characters (ASCII character set). Using non-ASCII + * characters returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese + * kanjis, and emojis. + * + *

[Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables + */ + @JsonProperty("spark_submit_params") + private Collection sparkSubmitParams; + + /** + * A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john + * doe", "age": "35"}`. The SQL alert task does not support custom parameters. + */ + @JsonProperty("sql_params") + private Map sqlParams; + + public RunJobTask setDbtCommands(Collection dbtCommands) { + this.dbtCommands = dbtCommands; + return this; + } + + public Collection getDbtCommands() { + return dbtCommands; + } + + public RunJobTask setJarParams(Collection jarParams) { + this.jarParams = jarParams; + return this; + } + + public Collection getJarParams() { + return jarParams; + } + public RunJobTask setJobId(Long jobId) { this.jobId = jobId; return this; @@ -36,24 +152,105 @@ public Map getJobParameters() { return jobParameters; } + public RunJobTask setNotebookParams(Map notebookParams) { + this.notebookParams = notebookParams; + return this; + } + + public Map getNotebookParams() { + return notebookParams; + } + + public RunJobTask setPipelineParams(PipelineParams pipelineParams) { + this.pipelineParams = pipelineParams; + return this; + } + + public PipelineParams getPipelineParams() { + return pipelineParams; + } + + public RunJobTask setPythonNamedParams(Map pythonNamedParams) { + this.pythonNamedParams = pythonNamedParams; + return this; + } + + public Map getPythonNamedParams() { + return pythonNamedParams; + } + + public RunJobTask setPythonParams(Collection pythonParams) { + this.pythonParams = pythonParams; + return this; + } + + public Collection getPythonParams() { + return pythonParams; + } + + public RunJobTask setSparkSubmitParams(Collection sparkSubmitParams) { + this.sparkSubmitParams = sparkSubmitParams; + return this; + } + + public Collection getSparkSubmitParams() { + return sparkSubmitParams; + } + + public RunJobTask setSqlParams(Map sqlParams) { + this.sqlParams = 
sqlParams; + return this; + } + + public Map getSqlParams() { + return sqlParams; + } + @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; RunJobTask that = (RunJobTask) o; - return Objects.equals(jobId, that.jobId) && Objects.equals(jobParameters, that.jobParameters); + return Objects.equals(dbtCommands, that.dbtCommands) + && Objects.equals(jarParams, that.jarParams) + && Objects.equals(jobId, that.jobId) + && Objects.equals(jobParameters, that.jobParameters) + && Objects.equals(notebookParams, that.notebookParams) + && Objects.equals(pipelineParams, that.pipelineParams) + && Objects.equals(pythonNamedParams, that.pythonNamedParams) + && Objects.equals(pythonParams, that.pythonParams) + && Objects.equals(sparkSubmitParams, that.sparkSubmitParams) + && Objects.equals(sqlParams, that.sqlParams); } @Override public int hashCode() { - return Objects.hash(jobId, jobParameters); + return Objects.hash( + dbtCommands, + jarParams, + jobId, + jobParameters, + notebookParams, + pipelineParams, + pythonNamedParams, + pythonParams, + sparkSubmitParams, + sqlParams); } @Override public String toString() { return new ToStringer(RunJobTask.class) + .add("dbtCommands", dbtCommands) + .add("jarParams", jarParams) .add("jobId", jobId) .add("jobParameters", jobParameters) + .add("notebookParams", notebookParams) + .add("pipelineParams", pipelineParams) + .add("pythonNamedParams", pythonNamedParams) + .add("pythonParams", pythonParams) + .add("sparkSubmitParams", sparkSubmitParams) + .add("sqlParams", sqlParams) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunNow.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunNow.java index 9b0598fae..92c428763 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunNow.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunNow.java @@ -13,7 
+13,7 @@ public class RunNow { /** * An array of commands to execute for jobs with the dbt task, for example `"dbt_commands": ["dbt - * deps", "dbt seed", "dbt run"]` + * deps", "dbt seed", "dbt deps", "dbt seed", "dbt run"]` */ @JsonProperty("dbt_commands") private Collection dbtCommands; @@ -42,10 +42,8 @@ public class RunNow { * cannot be specified in conjunction with notebook_params. The JSON representation of this field * (for example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes. * - *

Use [task parameter variables] such as `{{job.id}}` to pass context about job runs. - * - *

[task parameter variables]: - * https://docs.databricks.com/workflows/jobs/parameter-value-references.html + *

Use [Task parameter variables](/jobs.html\"#parameter-variables\") to set parameters + * containing information about job runs. */ @JsonProperty("jar_params") private Collection jarParams; @@ -67,14 +65,13 @@ public class RunNow { * *

notebook_params cannot be specified in conjunction with jar_params. * - *

Use [task parameter variables] such as `{{job.id}}` to pass context about job runs. + *

Use [Task parameter variables] to set parameters containing information about job runs. * *

The JSON representation of this field (for example `{"notebook_params":{"name":"john * doe","age":"35"}}`) cannot exceed 10,000 bytes. * - *

[dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html [task - * parameter variables]: - * https://docs.databricks.com/workflows/jobs/parameter-value-references.html + *

[Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables + * [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html */ @JsonProperty("notebook_params") private Map notebookParams; @@ -96,7 +93,7 @@ public class RunNow { * `run-now`, it would overwrite the parameters specified in job setting. The JSON representation * of this field (for example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes. * - *

Use [task parameter variables] such as `{{job.id}}` to pass context about job runs. + *

Use [Task parameter variables] to set parameters containing information about job runs. * *

Important * @@ -104,8 +101,7 @@ public class RunNow { * characters returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese * kanjis, and emojis. * - *

[task parameter variables]: - * https://docs.databricks.com/workflows/jobs/parameter-value-references.html + *

[Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables */ @JsonProperty("python_params") private Collection pythonParams; @@ -121,7 +117,7 @@ public class RunNow { * parameters specified in job setting. The JSON representation of this field (for example * `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes. * - *

Use [task parameter variables] such as `{{job.id}}` to pass context about job runs. + *

Use [Task parameter variables] to set parameters containing information about job runs * *

Important * @@ -129,8 +125,7 @@ public class RunNow { * characters returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese * kanjis, and emojis. * - *

[task parameter variables]: - * https://docs.databricks.com/workflows/jobs/parameter-value-references.html + *

[Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables */ @JsonProperty("spark_submit_params") private Collection sparkSubmitParams; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunNowResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunNowResponse.java index c5e1d48b4..41460356d 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunNowResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunNowResponse.java @@ -7,6 +7,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Run was started successfully. */ @Generated public class RunNowResponse { /** A unique identifier for this job run. This is set to the same value as `run_id`. */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunOutput.java index 05c16ca6b..ebfe9b747 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunOutput.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunOutput.java @@ -7,6 +7,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Run output was retrieved successfully. */ @Generated public class RunOutput { /** The output of a dbt task, if available. */ @@ -24,6 +25,10 @@ public class RunOutput { @JsonProperty("error_trace") private String errorTrace; + /** */ + @JsonProperty("info") + private String info; + /** * The output from tasks that write to standard streams (stdout/stderr) such as spark_jar_task, * spark_python_task, python_wheel_task. @@ -46,10 +51,12 @@ public class RunOutput { /** * The output of a notebook task, if available. A notebook task that terminates (either * successfully or with a failure) without calling `dbutils.notebook.exit()` is considered to have - * an empty output. 
This field is set but its result value is empty. restricts this - * API to return the first 5 MB of the output. To return a larger result, use the - * [ClusterLogConf](/dev-tools/api/latest/clusters.html#clusterlogconf) field to configure log - * storage for the job cluster. + * an empty output. This field is set but its result value is empty. Databricks restricts this API + * to return the first 5 MB of the output. To return a larger result, use the [ClusterLogConf] + * field to configure log storage for the job cluster. + * + *

[ClusterLogConf]: + * https://docs.databricks.com/dev-tools/api/latest/clusters.html#clusterlogconf */ @JsonProperty("notebook_output") private NotebookOutput notebookOutput; @@ -89,6 +96,15 @@ public String getErrorTrace() { return errorTrace; } + public RunOutput setInfo(String info) { + this.info = info; + return this; + } + + public String getInfo() { + return info; + } + public RunOutput setLogs(String logs) { this.logs = logs; return this; @@ -151,6 +167,7 @@ public boolean equals(Object o) { return Objects.equals(dbtOutput, that.dbtOutput) && Objects.equals(error, that.error) && Objects.equals(errorTrace, that.errorTrace) + && Objects.equals(info, that.info) && Objects.equals(logs, that.logs) && Objects.equals(logsTruncated, that.logsTruncated) && Objects.equals(metadata, that.metadata) @@ -165,6 +182,7 @@ public int hashCode() { dbtOutput, error, errorTrace, + info, logs, logsTruncated, metadata, @@ -179,6 +197,7 @@ public String toString() { .add("dbtOutput", dbtOutput) .add("error", error) .add("errorTrace", errorTrace) + .add("info", info) .add("logs", logs) .add("logsTruncated", logsTruncated) .add("metadata", metadata) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunParameters.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunParameters.java index 0f1fd99b0..9fff89385 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunParameters.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunParameters.java @@ -13,7 +13,7 @@ public class RunParameters { /** * An array of commands to execute for jobs with the dbt task, for example `"dbt_commands": ["dbt - * deps", "dbt seed", "dbt run"]` + * deps", "dbt seed", "dbt deps", "dbt seed", "dbt run"]` */ @JsonProperty("dbt_commands") private Collection dbtCommands; @@ -25,18 +25,12 @@ public class RunParameters { * cannot be specified in conjunction with notebook_params. 
The JSON representation of this field * (for example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes. * - *

Use [task parameter variables] such as `{{job.id}}` to pass context about job runs. - * - *

[task parameter variables]: - * https://docs.databricks.com/workflows/jobs/parameter-value-references.html + *

Use [Task parameter variables](/jobs.html\"#parameter-variables\") to set parameters + * containing information about job runs. */ @JsonProperty("jar_params") private Collection jarParams; - /** Job-level parameters used in the run. for example `"param": "overriding_val"` */ - @JsonProperty("job_parameters") - private Map jobParameters; - /** * A map from keys to values for jobs with notebook task, for example `"notebook_params": {"name": * "john doe", "age": "35"}`. The map is passed to the notebook and is accessible through the @@ -46,14 +40,13 @@ public class RunParameters { * *

notebook_params cannot be specified in conjunction with jar_params. * - *

Use [task parameter variables] such as `{{job.id}}` to pass context about job runs. + *

Use [Task parameter variables] to set parameters containing information about job runs. * *

The JSON representation of this field (for example `{"notebook_params":{"name":"john * doe","age":"35"}}`) cannot exceed 10,000 bytes. * - *

[dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html [task - * parameter variables]: - * https://docs.databricks.com/workflows/jobs/parameter-value-references.html + *

[Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables + * [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html */ @JsonProperty("notebook_params") private Map notebookParams; @@ -75,7 +68,7 @@ public class RunParameters { * `run-now`, it would overwrite the parameters specified in job setting. The JSON representation * of this field (for example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes. * - *

Use [task parameter variables] such as `{{job.id}}` to pass context about job runs. + *

Use [Task parameter variables] to set parameters containing information about job runs. * *

Important * @@ -83,8 +76,7 @@ public class RunParameters { * characters returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese * kanjis, and emojis. * - *

[task parameter variables]: - * https://docs.databricks.com/workflows/jobs/parameter-value-references.html + *

[Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables */ @JsonProperty("python_params") private Collection pythonParams; @@ -96,7 +88,7 @@ public class RunParameters { * parameters specified in job setting. The JSON representation of this field (for example * `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes. * - *

Use [task parameter variables] such as `{{job.id}}` to pass context about job runs. + *

Use [Task parameter variables] to set parameters containing information about job runs * *

Important * @@ -104,8 +96,7 @@ public class RunParameters { * characters returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese * kanjis, and emojis. * - *

[task parameter variables]: - * https://docs.databricks.com/workflows/jobs/parameter-value-references.html + *

[Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables */ @JsonProperty("spark_submit_params") private Collection sparkSubmitParams; @@ -135,15 +126,6 @@ public Collection getJarParams() { return jarParams; } - public RunParameters setJobParameters(Map jobParameters) { - this.jobParameters = jobParameters; - return this; - } - - public Map getJobParameters() { - return jobParameters; - } - public RunParameters setNotebookParams(Map notebookParams) { this.notebookParams = notebookParams; return this; @@ -205,7 +187,6 @@ public boolean equals(Object o) { RunParameters that = (RunParameters) o; return Objects.equals(dbtCommands, that.dbtCommands) && Objects.equals(jarParams, that.jarParams) - && Objects.equals(jobParameters, that.jobParameters) && Objects.equals(notebookParams, that.notebookParams) && Objects.equals(pipelineParams, that.pipelineParams) && Objects.equals(pythonNamedParams, that.pythonNamedParams) @@ -219,7 +200,6 @@ public int hashCode() { return Objects.hash( dbtCommands, jarParams, - jobParameters, notebookParams, pipelineParams, pythonNamedParams, @@ -233,7 +213,6 @@ public String toString() { return new ToStringer(RunParameters.class) .add("dbtCommands", dbtCommands) .add("jarParams", jarParams) - .add("jobParameters", jobParameters) .add("notebookParams", notebookParams) .add("pipelineParams", pipelineParams) .add("pythonNamedParams", pythonNamedParams) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTask.java index 616039421..4a46148a5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTask.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunTask.java @@ -8,6 +8,7 @@ import java.util.Collection; import java.util.Objects; +/** Used when outputting a child run, in GetRun or ListRuns. 
*/ @Generated public class RunTask { /** @@ -63,6 +64,13 @@ public class RunTask { @JsonProperty("description") private String description; + /** + * An optional set of email addresses notified when the task run begins or completes. The default + * behavior is to not send any emails. + */ + @JsonProperty("email_notifications") + private JobEmailNotifications emailNotifications; + /** * The time at which this run ended in epoch milliseconds (milliseconds since 1/1/1970 UTC). This * field is set to 0 if the job is still running. @@ -81,9 +89,9 @@ public class RunTask { private Long executionDuration; /** - * If existing_cluster_id, the ID of an existing cluster that is used for all runs of this job. - * When running jobs on an existing cluster, you may need to manually restart the cluster if it - * stops responding. We suggest running jobs on new clusters for greater reliability. + * If existing_cluster_id, the ID of an existing cluster that is used for all runs. When running + * jobs or tasks on an existing cluster, you may need to manually restart the cluster if it stops + * responding. We suggest running jobs and tasks on new clusters for greater reliability */ @JsonProperty("existing_cluster_id") private String existingClusterId; @@ -95,36 +103,47 @@ public class RunTask { /** * An optional specification for a remote Git repository containing the source code used by tasks. * Version-controlled source code is supported by notebook, dbt, Python script, and SQL File - * tasks. - * - *

If `git_source` is set, these tasks retrieve the file from the remote repository by default. - * However, this behavior can be overridden by setting `source` to `WORKSPACE` on the task. - * - *

Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File + * tasks. If `git_source` is set, these tasks retrieve the file from the remote repository by + * default. However, this behavior can be overridden by setting `source` to `WORKSPACE` on the + * task. Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File * tasks are used, `git_source` must be defined on the job. */ @JsonProperty("git_source") private GitSource gitSource; /** - * An optional list of libraries to be installed on the cluster that executes the job. The default - * value is an empty list. + * If job_cluster_key, this task is executed reusing the cluster specified in + * `job.settings.job_clusters`. + */ + @JsonProperty("job_cluster_key") + private String jobClusterKey; + + /** + * An optional list of libraries to be installed on the cluster. The default value is an empty + * list. */ @JsonProperty("libraries") private Collection libraries; - /** If new_cluster, a description of a new cluster that is created only for this task. */ + /** If new_cluster, a description of a new cluster that is created for each run. */ @JsonProperty("new_cluster") private com.databricks.sdk.service.compute.ClusterSpec newCluster; /** - * If notebook_task, indicates that this job must run a notebook. This field may not be specified + * If notebook_task, indicates that this task must run a notebook. This field may not be specified * in conjunction with spark_jar_task. */ @JsonProperty("notebook_task") private NotebookTask notebookTask; - /** If pipeline_task, indicates that this job must execute a Pipeline. */ + /** + * Optional notification settings that are used when sending notifications to each of the + * `email_notifications` and `webhook_notifications` for this task run. + */ + @JsonProperty("notification_settings") + private TaskNotificationSettings notificationSettings; + + /** If pipeline_task, indicates that this task must execute a Pipeline. 
*/ @JsonProperty("pipeline_task") private PipelineTask pipelineTask; @@ -140,6 +159,10 @@ public class RunTask { @JsonProperty("resolved_values") private ResolvedValues resolvedValues; + /** The time in milliseconds it took the job run and all of its repairs to finish. */ + @JsonProperty("run_duration") + private Long runDuration; + /** The ID of the task run. */ @JsonProperty("run_id") private Long runId; @@ -156,6 +179,10 @@ public class RunTask { @JsonProperty("run_job_task") private RunJobTask runJobTask; + /** */ + @JsonProperty("run_page_url") + private String runPageUrl; + /** * The time in milliseconds it took to set up the cluster. For runs that run on new clusters this * is the cluster creation time, for runs that run on existing clusters this time should be very @@ -166,11 +193,11 @@ public class RunTask { @JsonProperty("setup_duration") private Long setupDuration; - /** If spark_jar_task, indicates that this job must run a JAR. */ + /** If spark_jar_task, indicates that this task must run a JAR. */ @JsonProperty("spark_jar_task") private SparkJarTask sparkJarTask; - /** If spark_python_task, indicates that this job must run a Python file. */ + /** If spark_python_task, indicates that this task must run a Python file. */ @JsonProperty("spark_python_task") private SparkPythonTask sparkPythonTask; @@ -194,7 +221,7 @@ public class RunTask { @JsonProperty("spark_submit_task") private SparkSubmitTask sparkSubmitTask; - /** If sql_task, indicates that this job must execute a SQL. */ + /** If sql_task, indicates that this job must execute a SQL task. */ @JsonProperty("sql_task") private SqlTask sqlTask; @@ -218,6 +245,18 @@ public class RunTask { @JsonProperty("task_key") private String taskKey; + /** An optional timeout applied to each run of this job task. A value of `0` means no timeout. */ + @JsonProperty("timeout_seconds") + private Long timeoutSeconds; + + /** + * A collection of system notification IDs to notify when the run begins or completes. 
The default + * behavior is to not send any system notifications. Task webhooks respect the task notification + * settings. + */ + @JsonProperty("webhook_notifications") + private WebhookNotifications webhookNotifications; + public RunTask setAttemptNumber(Long attemptNumber) { this.attemptNumber = attemptNumber; return this; @@ -281,6 +320,15 @@ public String getDescription() { return description; } + public RunTask setEmailNotifications(JobEmailNotifications emailNotifications) { + this.emailNotifications = emailNotifications; + return this; + } + + public JobEmailNotifications getEmailNotifications() { + return emailNotifications; + } + public RunTask setEndTime(Long endTime) { this.endTime = endTime; return this; @@ -326,6 +374,15 @@ public GitSource getGitSource() { return gitSource; } + public RunTask setJobClusterKey(String jobClusterKey) { + this.jobClusterKey = jobClusterKey; + return this; + } + + public String getJobClusterKey() { + return jobClusterKey; + } + public RunTask setLibraries(Collection libraries) { this.libraries = libraries; return this; @@ -353,6 +410,15 @@ public NotebookTask getNotebookTask() { return notebookTask; } + public RunTask setNotificationSettings(TaskNotificationSettings notificationSettings) { + this.notificationSettings = notificationSettings; + return this; + } + + public TaskNotificationSettings getNotificationSettings() { + return notificationSettings; + } + public RunTask setPipelineTask(PipelineTask pipelineTask) { this.pipelineTask = pipelineTask; return this; @@ -389,6 +455,15 @@ public ResolvedValues getResolvedValues() { return resolvedValues; } + public RunTask setRunDuration(Long runDuration) { + this.runDuration = runDuration; + return this; + } + + public Long getRunDuration() { + return runDuration; + } + public RunTask setRunId(Long runId) { this.runId = runId; return this; @@ -416,6 +491,15 @@ public RunJobTask getRunJobTask() { return runJobTask; } + public RunTask setRunPageUrl(String runPageUrl) { + 
this.runPageUrl = runPageUrl; + return this; + } + + public String getRunPageUrl() { + return runPageUrl; + } + public RunTask setSetupDuration(Long setupDuration) { this.setupDuration = setupDuration; return this; @@ -488,6 +572,24 @@ public String getTaskKey() { return taskKey; } + public RunTask setTimeoutSeconds(Long timeoutSeconds) { + this.timeoutSeconds = timeoutSeconds; + return this; + } + + public Long getTimeoutSeconds() { + return timeoutSeconds; + } + + public RunTask setWebhookNotifications(WebhookNotifications webhookNotifications) { + this.webhookNotifications = webhookNotifications; + return this; + } + + public WebhookNotifications getWebhookNotifications() { + return webhookNotifications; + } + @Override public boolean equals(Object o) { if (this == o) return true; @@ -500,21 +602,26 @@ public boolean equals(Object o) { && Objects.equals(dbtTask, that.dbtTask) && Objects.equals(dependsOn, that.dependsOn) && Objects.equals(description, that.description) + && Objects.equals(emailNotifications, that.emailNotifications) && Objects.equals(endTime, that.endTime) && Objects.equals(executionDuration, that.executionDuration) && Objects.equals(existingClusterId, that.existingClusterId) && Objects.equals(forEachTask, that.forEachTask) && Objects.equals(gitSource, that.gitSource) + && Objects.equals(jobClusterKey, that.jobClusterKey) && Objects.equals(libraries, that.libraries) && Objects.equals(newCluster, that.newCluster) && Objects.equals(notebookTask, that.notebookTask) + && Objects.equals(notificationSettings, that.notificationSettings) && Objects.equals(pipelineTask, that.pipelineTask) && Objects.equals(pythonWheelTask, that.pythonWheelTask) && Objects.equals(queueDuration, that.queueDuration) && Objects.equals(resolvedValues, that.resolvedValues) + && Objects.equals(runDuration, that.runDuration) && Objects.equals(runId, that.runId) && Objects.equals(runIf, that.runIf) && Objects.equals(runJobTask, that.runJobTask) + && Objects.equals(runPageUrl, 
that.runPageUrl) && Objects.equals(setupDuration, that.setupDuration) && Objects.equals(sparkJarTask, that.sparkJarTask) && Objects.equals(sparkPythonTask, that.sparkPythonTask) @@ -522,7 +629,9 @@ public boolean equals(Object o) { && Objects.equals(sqlTask, that.sqlTask) && Objects.equals(startTime, that.startTime) && Objects.equals(state, that.state) - && Objects.equals(taskKey, that.taskKey); + && Objects.equals(taskKey, that.taskKey) + && Objects.equals(timeoutSeconds, that.timeoutSeconds) + && Objects.equals(webhookNotifications, that.webhookNotifications); } @Override @@ -535,21 +644,26 @@ public int hashCode() { dbtTask, dependsOn, description, + emailNotifications, endTime, executionDuration, existingClusterId, forEachTask, gitSource, + jobClusterKey, libraries, newCluster, notebookTask, + notificationSettings, pipelineTask, pythonWheelTask, queueDuration, resolvedValues, + runDuration, runId, runIf, runJobTask, + runPageUrl, setupDuration, sparkJarTask, sparkPythonTask, @@ -557,7 +671,9 @@ public int hashCode() { sqlTask, startTime, state, - taskKey); + taskKey, + timeoutSeconds, + webhookNotifications); } @Override @@ -570,21 +686,26 @@ public String toString() { .add("dbtTask", dbtTask) .add("dependsOn", dependsOn) .add("description", description) + .add("emailNotifications", emailNotifications) .add("endTime", endTime) .add("executionDuration", executionDuration) .add("existingClusterId", existingClusterId) .add("forEachTask", forEachTask) .add("gitSource", gitSource) + .add("jobClusterKey", jobClusterKey) .add("libraries", libraries) .add("newCluster", newCluster) .add("notebookTask", notebookTask) + .add("notificationSettings", notificationSettings) .add("pipelineTask", pipelineTask) .add("pythonWheelTask", pythonWheelTask) .add("queueDuration", queueDuration) .add("resolvedValues", resolvedValues) + .add("runDuration", runDuration) .add("runId", runId) .add("runIf", runIf) .add("runJobTask", runJobTask) + .add("runPageUrl", runPageUrl) 
.add("setupDuration", setupDuration) .add("sparkJarTask", sparkJarTask) .add("sparkPythonTask", sparkPythonTask) @@ -593,6 +714,8 @@ public String toString() { .add("startTime", startTime) .add("state", state) .add("taskKey", taskKey) + .add("timeoutSeconds", timeoutSeconds) + .add("webhookNotifications", webhookNotifications) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunType.java index 6c2c93b61..f0392d503 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunType.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunType.java @@ -5,9 +5,9 @@ import com.databricks.sdk.support.Generated; /** - * * `JOB_RUN`: Normal job run. A run created with :method:jobs/runNow. * `WORKFLOW_RUN`: Workflow - * run. A run created with [dbutils.notebook.run]. * `SUBMIT_RUN`: Submit run. A run created with - * :method:jobs/submit. + * The type of a run. * `JOB_RUN`: Normal job run. A run created with :method:jobs/runNow. * + * `WORKFLOW_RUN`: Workflow run. A run created with [dbutils.notebook.run]. * `SUBMIT_RUN`: Submit + * run. A run created with :method:jobs/submit. * *

[dbutils.notebook.run]: * https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-workflow @@ -16,7 +16,9 @@ public enum RunType { JOB_RUN, // Normal job run. A run created with :method:jobs/runNow. SUBMIT_RUN, // Submit run. A run created with :method:jobs/submit. - WORKFLOW_RUN, // Workflow run. A run created with - // [dbutils.notebook.run](/dev-tools/databricks-utils.html#dbutils-workflow). + WORKFLOW_RUN, // Workflow run. A run created with [dbutils.notebook.run]. + // + // [dbutils.notebook.run]: + // https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-workflow } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Source.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Source.java index 287304319..a59c4c4c2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Source.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Source.java @@ -4,8 +4,17 @@ import com.databricks.sdk.support.Generated; +/** + * Optional location type of the SQL file. When set to `WORKSPACE`, the SQL file will be retrieved\ + * from the local Databricks workspace. When set to `GIT`, the SQL file will be retrieved from a Git + * repository defined in `git_source`. If the value is empty, the task will use `GIT` if + * `git_source` is defined and `WORKSPACE` otherwise. + * + *

* `WORKSPACE`: SQL file is located in Databricks workspace. * `GIT`: SQL file is located in + * cloud Git provider. + */ @Generated public enum Source { - GIT, - WORKSPACE, + GIT, // SQL file is located in cloud Git provider. + WORKSPACE, // SQL file is located in workspace. } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SparkJarTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SparkJarTask.java index 6e0e445cc..5c2959c91 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SparkJarTask.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SparkJarTask.java @@ -30,10 +30,9 @@ public class SparkJarTask { /** * Parameters passed to the main method. * - *

Use [task parameter variables] such as `{{job.id}}` to pass context about job runs. + *

Use [Task parameter variables] to set parameters containing information about job runs. * - *

[task parameter variables]: - * https://docs.databricks.com/workflows/jobs/parameter-value-references.html + *

[Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables */ @JsonProperty("parameters") private Collection parameters; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SparkPythonTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SparkPythonTask.java index 80b612e60..0c69b9ca9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SparkPythonTask.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SparkPythonTask.java @@ -13,10 +13,9 @@ public class SparkPythonTask { /** * Command line parameters passed to the Python file. * - *

Use [task parameter variables] such as `{{job.id}}` to pass context about job runs. + *

Use [Task parameter variables] to set parameters containing information about job runs. * - *

[task parameter variables]: - * https://docs.databricks.com/workflows/jobs/parameter-value-references.html + *

[Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables */ @JsonProperty("parameters") private Collection parameters; @@ -32,12 +31,12 @@ public class SparkPythonTask { /** * Optional location type of the Python file. When set to `WORKSPACE` or not specified, the file - * will be retrieved from the local workspace or cloud location (if the `python_file` + * will be retrieved from the local Databricks workspace or cloud location (if the `python_file` * has a URI format). When set to `GIT`, the Python file will be retrieved from a Git repository * defined in `git_source`. * - *

* `WORKSPACE`: The Python file is located in a workspace or at a cloud - * filesystem URI. * `GIT`: The Python file is located in a remote Git repository. + *

* `WORKSPACE`: The Python file is located in a Databricks workspace or at a cloud filesystem + * URI. * `GIT`: The Python file is located in a remote Git repository. */ @JsonProperty("source") private Source source; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SparkSubmitTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SparkSubmitTask.java index 9f5c42cb3..422b67934 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SparkSubmitTask.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SparkSubmitTask.java @@ -13,10 +13,9 @@ public class SparkSubmitTask { /** * Command-line parameters passed to spark submit. * - *

Use [task parameter variables] such as `{{job.id}}` to pass context about job runs. + *

Use [Task parameter variables] to set parameters containing information about job runs. * - *

[task parameter variables]: - * https://docs.databricks.com/workflows/jobs/parameter-value-references.html + *

[Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables */ @JsonProperty("parameters") private Collection parameters; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlDashboardWidgetOutputStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlDashboardWidgetOutputStatus.java index 1fc548ff0..275e13ceb 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlDashboardWidgetOutputStatus.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlDashboardWidgetOutputStatus.java @@ -4,7 +4,6 @@ import com.databricks.sdk.support.Generated; -/** The execution status of the SQL widget. */ @Generated public enum SqlDashboardWidgetOutputStatus { CANCELLED, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlQueryOutput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlQueryOutput.java index 2674b8eb2..d540662e4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlQueryOutput.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlQueryOutput.java @@ -10,6 +10,10 @@ @Generated public class SqlQueryOutput { + /** */ + @JsonProperty("endpoint_id") + private String endpointId; + /** The link to find the output results. 
*/ @JsonProperty("output_link") private String outputLink; @@ -28,6 +32,15 @@ public class SqlQueryOutput { @JsonProperty("warehouse_id") private String warehouseId; + public SqlQueryOutput setEndpointId(String endpointId) { + this.endpointId = endpointId; + return this; + } + + public String getEndpointId() { + return endpointId; + } + public SqlQueryOutput setOutputLink(String outputLink) { this.outputLink = outputLink; return this; @@ -69,7 +82,8 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; SqlQueryOutput that = (SqlQueryOutput) o; - return Objects.equals(outputLink, that.outputLink) + return Objects.equals(endpointId, that.endpointId) + && Objects.equals(outputLink, that.outputLink) && Objects.equals(queryText, that.queryText) && Objects.equals(sqlStatements, that.sqlStatements) && Objects.equals(warehouseId, that.warehouseId); @@ -77,12 +91,13 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(outputLink, queryText, sqlStatements, warehouseId); + return Objects.hash(endpointId, outputLink, queryText, sqlStatements, warehouseId); } @Override public String toString() { return new ToStringer(SqlQueryOutput.class) + .add("endpointId", endpointId) .add("outputLink", outputLink) .add("queryText", queryText) .add("sqlStatements", sqlStatements) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlTask.java index cac013640..7522a3bc5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlTask.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlTask.java @@ -18,11 +18,7 @@ public class SqlTask { @JsonProperty("dashboard") private SqlTaskDashboard dashboard; - /** - * If file, indicates that this job runs a SQL file in a remote Git repository. 
Only one SQL - * statement is supported in a file. Multiple SQL statements separated by semicolons (;) are not - * permitted. - */ + /** If file, indicates that this job runs a SQL file in a remote Git repository. */ @JsonProperty("file") private SqlTaskFile file; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlTaskFile.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlTaskFile.java index 0c40faa16..0560efe12 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlTaskFile.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SqlTaskFile.java @@ -18,12 +18,12 @@ public class SqlTaskFile { /** * Optional location type of the SQL file. When set to `WORKSPACE`, the SQL file will be retrieved - * from the local workspace. When set to `GIT`, the SQL file will be retrieved from a + * from the local Databricks workspace. When set to `GIT`, the SQL file will be retrieved from a * Git repository defined in `git_source`. If the value is empty, the task will use `GIT` if * `git_source` is defined and `WORKSPACE` otherwise. * - *

* `WORKSPACE`: SQL file is located in workspace. * `GIT`: SQL file is located - * in cloud Git provider. + *

* `WORKSPACE`: SQL file is located in Databricks workspace. * `GIT`: SQL file is located in + * cloud Git provider. */ @JsonProperty("source") private Source source; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitRun.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitRun.java index 5b547374f..f9602d388 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitRun.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitRun.java @@ -14,6 +14,21 @@ public class SubmitRun { @JsonProperty("access_control_list") private Collection accessControlList; + /** + * If condition_task, specifies a condition with an outcome that can be used to control the + * execution of other tasks. Does not require a cluster to execute and does not support retries or + * notifications. + */ + @JsonProperty("condition_task") + private ConditionTask conditionTask; + + /** + * If dbt_task, indicates that this must execute a dbt task. It requires both Databricks SQL and + * the ability to use a serverless or a pro SQL warehouse. + */ + @JsonProperty("dbt_task") + private DbtTask dbtTask; + /** An optional set of email addresses notified when the run begins or completes. */ @JsonProperty("email_notifications") private JobEmailNotifications emailNotifications; @@ -54,6 +69,13 @@ public class SubmitRun { @JsonProperty("idempotency_token") private String idempotencyToken; + /** + * If notebook_task, indicates that this task must run a notebook. This field may not be specified + * in conjunction with spark_jar_task. + */ + @JsonProperty("notebook_task") + private NotebookTask notebookTask; + /** * Optional notification settings that are used when sending notifications to each of the * `email_notifications` and `webhook_notifications` for this run. 
@@ -61,14 +83,65 @@ public class SubmitRun { @JsonProperty("notification_settings") private JobNotificationSettings notificationSettings; + /** If pipeline_task, indicates that this task must execute a Pipeline. */ + @JsonProperty("pipeline_task") + private PipelineTask pipelineTask; + + /** If python_wheel_task, indicates that this job must execute a PythonWheel. */ + @JsonProperty("python_wheel_task") + private PythonWheelTask pythonWheelTask; + /** The queue settings of the one-time run. */ @JsonProperty("queue") private QueueSettings queue; + /** + * Specifies the user or service principal that the job runs as. If not specified, the job runs as + * the user who submits the request. + */ + @JsonProperty("run_as") + private JobRunAs runAs; + + /** If run_job_task, indicates that this task must execute another job. */ + @JsonProperty("run_job_task") + private RunJobTask runJobTask; + /** An optional name for the run. The default value is `Untitled`. */ @JsonProperty("run_name") private String runName; + /** If spark_jar_task, indicates that this task must run a JAR. */ + @JsonProperty("spark_jar_task") + private SparkJarTask sparkJarTask; + + /** If spark_python_task, indicates that this task must run a Python file. */ + @JsonProperty("spark_python_task") + private SparkPythonTask sparkPythonTask; + + /** + * If `spark_submit_task`, indicates that this task must be launched by the spark submit script. + * This task can run only on new clusters. + * + *

In the `new_cluster` specification, `libraries` and `spark_conf` are not supported. Instead, + * use `--jars` and `--py-files` to add Java and Python libraries and `--conf` to set the Spark + * configurations. + * + *

`master`, `deploy-mode`, and `executor-cores` are automatically configured by Databricks; + * you _cannot_ specify them in parameters. + * + *

By default, the Spark submit job uses all available memory (excluding reserved memory for + * Databricks services). You can set `--driver-memory`, and `--executor-memory` to a smaller value + * to leave some room for off-heap usage. + * + *

The `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths. + */ + @JsonProperty("spark_submit_task") + private SparkSubmitTask sparkSubmitTask; + + /** If sql_task, indicates that this job must execute a SQL task. */ + @JsonProperty("sql_task") + private SqlTask sqlTask; + /** */ @JsonProperty("tasks") private Collection tasks; @@ -91,6 +164,24 @@ public Collection getAccess return accessControlList; } + public SubmitRun setConditionTask(ConditionTask conditionTask) { + this.conditionTask = conditionTask; + return this; + } + + public ConditionTask getConditionTask() { + return conditionTask; + } + + public SubmitRun setDbtTask(DbtTask dbtTask) { + this.dbtTask = dbtTask; + return this; + } + + public DbtTask getDbtTask() { + return dbtTask; + } + public SubmitRun setEmailNotifications(JobEmailNotifications emailNotifications) { this.emailNotifications = emailNotifications; return this; @@ -127,6 +218,15 @@ public String getIdempotencyToken() { return idempotencyToken; } + public SubmitRun setNotebookTask(NotebookTask notebookTask) { + this.notebookTask = notebookTask; + return this; + } + + public NotebookTask getNotebookTask() { + return notebookTask; + } + public SubmitRun setNotificationSettings(JobNotificationSettings notificationSettings) { this.notificationSettings = notificationSettings; return this; @@ -136,6 +236,24 @@ public JobNotificationSettings getNotificationSettings() { return notificationSettings; } + public SubmitRun setPipelineTask(PipelineTask pipelineTask) { + this.pipelineTask = pipelineTask; + return this; + } + + public PipelineTask getPipelineTask() { + return pipelineTask; + } + + public SubmitRun setPythonWheelTask(PythonWheelTask pythonWheelTask) { + this.pythonWheelTask = pythonWheelTask; + return this; + } + + public PythonWheelTask getPythonWheelTask() { + return pythonWheelTask; + } + public SubmitRun setQueue(QueueSettings queue) { this.queue = queue; return this; @@ -145,6 +263,24 @@ public QueueSettings getQueue() 
{ return queue; } + public SubmitRun setRunAs(JobRunAs runAs) { + this.runAs = runAs; + return this; + } + + public JobRunAs getRunAs() { + return runAs; + } + + public SubmitRun setRunJobTask(RunJobTask runJobTask) { + this.runJobTask = runJobTask; + return this; + } + + public RunJobTask getRunJobTask() { + return runJobTask; + } + public SubmitRun setRunName(String runName) { this.runName = runName; return this; @@ -154,6 +290,42 @@ public String getRunName() { return runName; } + public SubmitRun setSparkJarTask(SparkJarTask sparkJarTask) { + this.sparkJarTask = sparkJarTask; + return this; + } + + public SparkJarTask getSparkJarTask() { + return sparkJarTask; + } + + public SubmitRun setSparkPythonTask(SparkPythonTask sparkPythonTask) { + this.sparkPythonTask = sparkPythonTask; + return this; + } + + public SparkPythonTask getSparkPythonTask() { + return sparkPythonTask; + } + + public SubmitRun setSparkSubmitTask(SparkSubmitTask sparkSubmitTask) { + this.sparkSubmitTask = sparkSubmitTask; + return this; + } + + public SparkSubmitTask getSparkSubmitTask() { + return sparkSubmitTask; + } + + public SubmitRun setSqlTask(SqlTask sqlTask) { + this.sqlTask = sqlTask; + return this; + } + + public SqlTask getSqlTask() { + return sqlTask; + } + public SubmitRun setTasks(Collection tasks) { this.tasks = tasks; return this; @@ -187,13 +359,24 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; SubmitRun that = (SubmitRun) o; return Objects.equals(accessControlList, that.accessControlList) + && Objects.equals(conditionTask, that.conditionTask) + && Objects.equals(dbtTask, that.dbtTask) && Objects.equals(emailNotifications, that.emailNotifications) && Objects.equals(gitSource, that.gitSource) && Objects.equals(health, that.health) && Objects.equals(idempotencyToken, that.idempotencyToken) + && Objects.equals(notebookTask, that.notebookTask) && Objects.equals(notificationSettings, that.notificationSettings) + && 
Objects.equals(pipelineTask, that.pipelineTask) + && Objects.equals(pythonWheelTask, that.pythonWheelTask) && Objects.equals(queue, that.queue) + && Objects.equals(runAs, that.runAs) + && Objects.equals(runJobTask, that.runJobTask) && Objects.equals(runName, that.runName) + && Objects.equals(sparkJarTask, that.sparkJarTask) + && Objects.equals(sparkPythonTask, that.sparkPythonTask) + && Objects.equals(sparkSubmitTask, that.sparkSubmitTask) + && Objects.equals(sqlTask, that.sqlTask) && Objects.equals(tasks, that.tasks) && Objects.equals(timeoutSeconds, that.timeoutSeconds) && Objects.equals(webhookNotifications, that.webhookNotifications); @@ -203,13 +386,24 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash( accessControlList, + conditionTask, + dbtTask, emailNotifications, gitSource, health, idempotencyToken, + notebookTask, notificationSettings, + pipelineTask, + pythonWheelTask, queue, + runAs, + runJobTask, runName, + sparkJarTask, + sparkPythonTask, + sparkSubmitTask, + sqlTask, tasks, timeoutSeconds, webhookNotifications); @@ -219,13 +413,24 @@ public int hashCode() { public String toString() { return new ToStringer(SubmitRun.class) .add("accessControlList", accessControlList) + .add("conditionTask", conditionTask) + .add("dbtTask", dbtTask) .add("emailNotifications", emailNotifications) .add("gitSource", gitSource) .add("health", health) .add("idempotencyToken", idempotencyToken) + .add("notebookTask", notebookTask) .add("notificationSettings", notificationSettings) + .add("pipelineTask", pipelineTask) + .add("pythonWheelTask", pythonWheelTask) .add("queue", queue) + .add("runAs", runAs) + .add("runJobTask", runJobTask) .add("runName", runName) + .add("sparkJarTask", sparkJarTask) + .add("sparkPythonTask", sparkPythonTask) + .add("sparkSubmitTask", sparkSubmitTask) + .add("sqlTask", sqlTask) .add("tasks", tasks) .add("timeoutSeconds", timeoutSeconds) .add("webhookNotifications", webhookNotifications) diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitRunResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitRunResponse.java index 725831ba7..1849bf903 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitRunResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitRunResponse.java @@ -7,6 +7,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Run was created and started successfully. */ @Generated public class SubmitRunResponse { /** The canonical identifier for the newly submitted run. */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java index 0241b7bf2..745b41fde 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/SubmitTask.java @@ -26,6 +26,10 @@ public class SubmitTask { @JsonProperty("depends_on") private Collection dependsOn; + /** An optional description for this task. */ + @JsonProperty("description") + private String description; + /** * An optional set of email addresses notified when the task run begins or completes. The default * behavior is to not send any emails. @@ -34,18 +38,14 @@ public class SubmitTask { private JobEmailNotifications emailNotifications; /** - * If existing_cluster_id, the ID of an existing cluster that is used for all runs of this task. - * Only all-purpose clusters are supported. When running tasks on an existing cluster, you may - * need to manually restart the cluster if it stops responding. We suggest running jobs on new - * clusters for greater reliability. + * If existing_cluster_id, the ID of an existing cluster that is used for all runs. 
When running + * jobs or tasks on an existing cluster, you may need to manually restart the cluster if it stops + * responding. We suggest running jobs and tasks on new clusters for greater reliability */ @JsonProperty("existing_cluster_id") private String existingClusterId; - /** - * If for_each_task, indicates that this must execute the nested task within it for the inputs - * provided. - */ + /** If for_each_task, indicates that this task must execute the nested task within it. */ @JsonProperty("for_each_task") private ForEachTask forEachTask; @@ -54,13 +54,13 @@ public class SubmitTask { private JobsHealthRules health; /** - * An optional list of libraries to be installed on the cluster that executes the task. The - * default value is an empty list. + * An optional list of libraries to be installed on the cluster. The default value is an empty + * list. */ @JsonProperty("libraries") private Collection libraries; - /** If new_cluster, a description of a cluster that is created for each run. */ + /** If new_cluster, a description of a new cluster that is created for each run. */ @JsonProperty("new_cluster") private com.databricks.sdk.service.compute.ClusterSpec newCluster; @@ -94,7 +94,7 @@ public class SubmitTask { @JsonProperty("run_if") private RunIf runIf; - /** If run_job_task, indicates that this job must execute another job. */ + /** If run_job_task, indicates that this task must execute another job. */ @JsonProperty("run_job_task") private RunJobTask runJobTask; @@ -126,7 +126,7 @@ public class SubmitTask { @JsonProperty("spark_submit_task") private SparkSubmitTask sparkSubmitTask; - /** If sql_task, indicates that this job must execute a SQL. */ + /** If sql_task, indicates that this job must execute a SQL task. 
*/ @JsonProperty("sql_task") private SqlTask sqlTask; @@ -168,6 +168,15 @@ public Collection getDependsOn() { return dependsOn; } + public SubmitTask setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + public SubmitTask setEmailNotifications(JobEmailNotifications emailNotifications) { this.emailNotifications = emailNotifications; return this; @@ -346,6 +355,7 @@ public boolean equals(Object o) { SubmitTask that = (SubmitTask) o; return Objects.equals(conditionTask, that.conditionTask) && Objects.equals(dependsOn, that.dependsOn) + && Objects.equals(description, that.description) && Objects.equals(emailNotifications, that.emailNotifications) && Objects.equals(existingClusterId, that.existingClusterId) && Objects.equals(forEachTask, that.forEachTask) @@ -372,6 +382,7 @@ public int hashCode() { return Objects.hash( conditionTask, dependsOn, + description, emailNotifications, existingClusterId, forEachTask, @@ -398,6 +409,7 @@ public String toString() { return new ToStringer(SubmitTask.class) .add("conditionTask", conditionTask) .add("dependsOn", dependsOn) + .add("description", description) .add("emailNotifications", emailNotifications) .add("existingClusterId", existingClusterId) .add("forEachTask", forEachTask) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TableTriggerConfiguration.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TableUpdateTriggerConfiguration.java similarity index 83% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TableTriggerConfiguration.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TableUpdateTriggerConfiguration.java index ac3233968..db8989c2e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TableTriggerConfiguration.java +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TableUpdateTriggerConfiguration.java @@ -9,7 +9,7 @@ import java.util.Objects; @Generated -public class TableTriggerConfiguration { +public class TableUpdateTriggerConfiguration { /** The table(s) condition based on which to trigger a job run. */ @JsonProperty("condition") private Condition condition; @@ -36,7 +36,7 @@ public class TableTriggerConfiguration { @JsonProperty("wait_after_last_change_seconds") private Long waitAfterLastChangeSeconds; - public TableTriggerConfiguration setCondition(Condition condition) { + public TableUpdateTriggerConfiguration setCondition(Condition condition) { this.condition = condition; return this; } @@ -45,7 +45,7 @@ public Condition getCondition() { return condition; } - public TableTriggerConfiguration setMinTimeBetweenTriggersSeconds( + public TableUpdateTriggerConfiguration setMinTimeBetweenTriggersSeconds( Long minTimeBetweenTriggersSeconds) { this.minTimeBetweenTriggersSeconds = minTimeBetweenTriggersSeconds; return this; @@ -55,7 +55,7 @@ public Long getMinTimeBetweenTriggersSeconds() { return minTimeBetweenTriggersSeconds; } - public TableTriggerConfiguration setTableNames(Collection tableNames) { + public TableUpdateTriggerConfiguration setTableNames(Collection tableNames) { this.tableNames = tableNames; return this; } @@ -64,7 +64,8 @@ public Collection getTableNames() { return tableNames; } - public TableTriggerConfiguration setWaitAfterLastChangeSeconds(Long waitAfterLastChangeSeconds) { + public TableUpdateTriggerConfiguration setWaitAfterLastChangeSeconds( + Long waitAfterLastChangeSeconds) { this.waitAfterLastChangeSeconds = waitAfterLastChangeSeconds; return this; } @@ -77,7 +78,7 @@ public Long getWaitAfterLastChangeSeconds() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - TableTriggerConfiguration that = (TableTriggerConfiguration) o; + TableUpdateTriggerConfiguration 
that = (TableUpdateTriggerConfiguration) o; return Objects.equals(condition, that.condition) && Objects.equals(minTimeBetweenTriggersSeconds, that.minTimeBetweenTriggersSeconds) && Objects.equals(tableNames, that.tableNames) @@ -92,7 +93,7 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(TableTriggerConfiguration.class) + return new ToStringer(TableUpdateTriggerConfiguration.class) .add("condition", condition) .add("minTimeBetweenTriggersSeconds", minTimeBetweenTriggersSeconds) .add("tableNames", tableNames) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java index 4d79b0c2e..0f9026396 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java @@ -10,13 +10,6 @@ @Generated public class Task { - /** - * The key of the compute requirement, specified in `job.settings.compute`, to use for execution - * of this task. - */ - @JsonProperty("compute_key") - private String computeKey; - /** * If condition_task, specifies a condition with an outcome that can be used to control the * execution of other tasks. Does not require a cluster to execute and does not support retries or @@ -45,6 +38,10 @@ public class Task { @JsonProperty("description") private String description; + /** An option to disable auto optimization in serverless */ + @JsonProperty("disable_auto_optimization") + private Boolean disableAutoOptimization; + /** * An optional set of email addresses that is notified when runs of this task begin or complete as * well as when this task is deleted. The default behavior is to not send any emails. @@ -53,18 +50,21 @@ public class Task { private TaskEmailNotifications emailNotifications; /** - * If existing_cluster_id, the ID of an existing cluster that is used for all runs of this task. 
- * Only all-purpose clusters are supported. When running tasks on an existing cluster, you may - * need to manually restart the cluster if it stops responding. We suggest running jobs on new - * clusters for greater reliability. + * The key that references an environment spec in a job. This field is required for Python script, + * Python wheel and dbt tasks when using serverless compute. */ - @JsonProperty("existing_cluster_id") - private String existingClusterId; + @JsonProperty("environment_key") + private String environmentKey; /** - * If for_each_task, indicates that this must execute the nested task within it for the inputs - * provided. + * If existing_cluster_id, the ID of an existing cluster that is used for all runs. When running + * jobs or tasks on an existing cluster, you may need to manually restart the cluster if it stops + * responding. We suggest running jobs and tasks on new clusters for greater reliability */ + @JsonProperty("existing_cluster_id") + private String existingClusterId; + + /** If for_each_task, indicates that this task must execute the nested task within it. */ @JsonProperty("for_each_task") private ForEachTask forEachTask; @@ -80,8 +80,8 @@ public class Task { private String jobClusterKey; /** - * An optional list of libraries to be installed on the cluster that executes the task. The - * default value is an empty list. + * An optional list of libraries to be installed on the cluster. The default value is an empty + * list. */ @JsonProperty("libraries") private Collection libraries; @@ -102,7 +102,7 @@ public class Task { @JsonProperty("min_retry_interval_millis") private Long minRetryIntervalMillis; - /** If new_cluster, a description of a cluster that is created for only for this task. */ + /** If new_cluster, a description of a new cluster that is created for each run. 
*/ @JsonProperty("new_cluster") private com.databricks.sdk.service.compute.ClusterSpec newCluster; @@ -128,7 +128,10 @@ public class Task { @JsonProperty("python_wheel_task") private PythonWheelTask pythonWheelTask; - /** An optional policy to specify whether to retry a task when it times out. */ + /** + * An optional policy to specify whether to retry a job when it times out. The default behavior is + * to not retry on timeout. + */ @JsonProperty("retry_on_timeout") private Boolean retryOnTimeout; @@ -200,15 +203,6 @@ public class Task { @JsonProperty("webhook_notifications") private WebhookNotifications webhookNotifications; - public Task setComputeKey(String computeKey) { - this.computeKey = computeKey; - return this; - } - - public String getComputeKey() { - return computeKey; - } - public Task setConditionTask(ConditionTask conditionTask) { this.conditionTask = conditionTask; return this; @@ -245,6 +239,15 @@ public String getDescription() { return description; } + public Task setDisableAutoOptimization(Boolean disableAutoOptimization) { + this.disableAutoOptimization = disableAutoOptimization; + return this; + } + + public Boolean getDisableAutoOptimization() { + return disableAutoOptimization; + } + public Task setEmailNotifications(TaskEmailNotifications emailNotifications) { this.emailNotifications = emailNotifications; return this; @@ -254,6 +257,15 @@ public TaskEmailNotifications getEmailNotifications() { return emailNotifications; } + public Task setEnvironmentKey(String environmentKey) { + this.environmentKey = environmentKey; + return this; + } + + public String getEnvironmentKey() { + return environmentKey; + } + public Task setExistingClusterId(String existingClusterId) { this.existingClusterId = existingClusterId; return this; @@ -457,12 +469,13 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Task that = (Task) o; - return Objects.equals(computeKey, that.computeKey) - && 
Objects.equals(conditionTask, that.conditionTask) + return Objects.equals(conditionTask, that.conditionTask) && Objects.equals(dbtTask, that.dbtTask) && Objects.equals(dependsOn, that.dependsOn) && Objects.equals(description, that.description) + && Objects.equals(disableAutoOptimization, that.disableAutoOptimization) && Objects.equals(emailNotifications, that.emailNotifications) + && Objects.equals(environmentKey, that.environmentKey) && Objects.equals(existingClusterId, that.existingClusterId) && Objects.equals(forEachTask, that.forEachTask) && Objects.equals(health, that.health) @@ -490,12 +503,13 @@ public boolean equals(Object o) { @Override public int hashCode() { return Objects.hash( - computeKey, conditionTask, dbtTask, dependsOn, description, + disableAutoOptimization, emailNotifications, + environmentKey, existingClusterId, forEachTask, health, @@ -523,12 +537,13 @@ public int hashCode() { @Override public String toString() { return new ToStringer(Task.class) - .add("computeKey", computeKey) .add("conditionTask", conditionTask) .add("dbtTask", dbtTask) .add("dependsOn", dependsOn) .add("description", description) + .add("disableAutoOptimization", disableAutoOptimization) .add("emailNotifications", emailNotifications) + .add("environmentKey", environmentKey) .add("existingClusterId", existingClusterId) .add("forEachTask", forEachTask) .add("health", health) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TaskEmailNotifications.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TaskEmailNotifications.java index fd2a2f76d..03e7e8faf 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TaskEmailNotifications.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TaskEmailNotifications.java @@ -10,6 +10,10 @@ @Generated public class TaskEmailNotifications { + /** If true, do not send email to recipients specified in `on_failure` if the run is skipped. 
*/ + @JsonProperty("no_alert_for_skipped_runs") + private Boolean noAlertForSkippedRuns; + /** * A list of email addresses to be notified when the duration of a run exceeds the threshold * specified for the `RUN_DURATION_SECONDS` metric in the `health` field. If no rule for the @@ -44,6 +48,15 @@ public class TaskEmailNotifications { @JsonProperty("on_success") private Collection onSuccess; + public TaskEmailNotifications setNoAlertForSkippedRuns(Boolean noAlertForSkippedRuns) { + this.noAlertForSkippedRuns = noAlertForSkippedRuns; + return this; + } + + public Boolean getNoAlertForSkippedRuns() { + return noAlertForSkippedRuns; + } + public TaskEmailNotifications setOnDurationWarningThresholdExceeded( Collection onDurationWarningThresholdExceeded) { this.onDurationWarningThresholdExceeded = onDurationWarningThresholdExceeded; @@ -86,7 +99,8 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; TaskEmailNotifications that = (TaskEmailNotifications) o; - return Objects.equals( + return Objects.equals(noAlertForSkippedRuns, that.noAlertForSkippedRuns) + && Objects.equals( onDurationWarningThresholdExceeded, that.onDurationWarningThresholdExceeded) && Objects.equals(onFailure, that.onFailure) && Objects.equals(onStart, that.onStart) @@ -95,12 +109,14 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(onDurationWarningThresholdExceeded, onFailure, onStart, onSuccess); + return Objects.hash( + noAlertForSkippedRuns, onDurationWarningThresholdExceeded, onFailure, onStart, onSuccess); } @Override public String toString() { return new ToStringer(TaskEmailNotifications.class) + .add("noAlertForSkippedRuns", noAlertForSkippedRuns) .add("onDurationWarningThresholdExceeded", onDurationWarningThresholdExceeded) .add("onFailure", onFailure) .add("onStart", onStart) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TriggerInfo.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TriggerInfo.java index e66e14a11..68c5c837b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TriggerInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TriggerInfo.java @@ -7,6 +7,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import java.util.Objects; +/** Additional details about what triggered the run */ @Generated public class TriggerInfo { /** The run id of the Run Job task run */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TriggerSettings.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TriggerSettings.java index b25b88e1d..069ac1c68 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TriggerSettings.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TriggerSettings.java @@ -17,9 +17,13 @@ public class TriggerSettings { @JsonProperty("pause_status") private PauseStatus pauseStatus; - /** Table trigger settings. */ + /** Old table trigger settings name. Deprecated in favor of `table_update`. 
*/ @JsonProperty("table") - private TableTriggerConfiguration table; + private TableUpdateTriggerConfiguration table; + + /** */ + @JsonProperty("table_update") + private TableUpdateTriggerConfiguration tableUpdate; public TriggerSettings setFileArrival(FileArrivalTriggerConfiguration fileArrival) { this.fileArrival = fileArrival; @@ -39,15 +43,24 @@ public PauseStatus getPauseStatus() { return pauseStatus; } - public TriggerSettings setTable(TableTriggerConfiguration table) { + public TriggerSettings setTable(TableUpdateTriggerConfiguration table) { this.table = table; return this; } - public TableTriggerConfiguration getTable() { + public TableUpdateTriggerConfiguration getTable() { return table; } + public TriggerSettings setTableUpdate(TableUpdateTriggerConfiguration tableUpdate) { + this.tableUpdate = tableUpdate; + return this; + } + + public TableUpdateTriggerConfiguration getTableUpdate() { + return tableUpdate; + } + @Override public boolean equals(Object o) { if (this == o) return true; @@ -55,12 +68,13 @@ public boolean equals(Object o) { TriggerSettings that = (TriggerSettings) o; return Objects.equals(fileArrival, that.fileArrival) && Objects.equals(pauseStatus, that.pauseStatus) - && Objects.equals(table, that.table); + && Objects.equals(table, that.table) + && Objects.equals(tableUpdate, that.tableUpdate); } @Override public int hashCode() { - return Objects.hash(fileArrival, pauseStatus, table); + return Objects.hash(fileArrival, pauseStatus, table, tableUpdate); } @Override @@ -69,6 +83,7 @@ public String toString() { .add("fileArrival", fileArrival) .add("pauseStatus", pauseStatus) .add("table", table) + .add("tableUpdate", tableUpdate) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/WebhookNotifications.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/WebhookNotifications.java index 67bc5a199..e47a3b422 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/WebhookNotifications.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/WebhookNotifications.java @@ -16,8 +16,7 @@ public class WebhookNotifications { * destinations can be specified for the `on_duration_warning_threshold_exceeded` property. */ @JsonProperty("on_duration_warning_threshold_exceeded") - private Collection - onDurationWarningThresholdExceeded; + private Collection onDurationWarningThresholdExceeded; /** * An optional list of system notification IDs to call when the run fails. A maximum of 3 @@ -41,14 +40,12 @@ public class WebhookNotifications { private Collection onSuccess; public WebhookNotifications setOnDurationWarningThresholdExceeded( - Collection - onDurationWarningThresholdExceeded) { + Collection onDurationWarningThresholdExceeded) { this.onDurationWarningThresholdExceeded = onDurationWarningThresholdExceeded; return this; } - public Collection - getOnDurationWarningThresholdExceeded() { + public Collection getOnDurationWarningThresholdExceeded() { return onDurationWarningThresholdExceeded; } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AddExchangeForListingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AddExchangeForListingRequest.java new file mode 100755 index 000000000..8c788493a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AddExchangeForListingRequest.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class AddExchangeForListingRequest { + /** */ + @JsonProperty("exchange_id") + private String exchangeId; + + /** */ + @JsonProperty("listing_id") + private String listingId; + + public AddExchangeForListingRequest setExchangeId(String exchangeId) { + this.exchangeId = exchangeId; + return this; + } + + public String getExchangeId() { + return exchangeId; + } + + public AddExchangeForListingRequest setListingId(String listingId) { + this.listingId = listingId; + return this; + } + + public String getListingId() { + return listingId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AddExchangeForListingRequest that = (AddExchangeForListingRequest) o; + return Objects.equals(exchangeId, that.exchangeId) && Objects.equals(listingId, that.listingId); + } + + @Override + public int hashCode() { + return Objects.hash(exchangeId, listingId); + } + + @Override + public String toString() { + return new ToStringer(AddExchangeForListingRequest.class) + .add("exchangeId", exchangeId) + .add("listingId", listingId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AddExchangeForListingResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AddExchangeForListingResponse.java new file mode 100755 index 000000000..4dc2a5d00 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AddExchangeForListingResponse.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class AddExchangeForListingResponse { + /** */ + @JsonProperty("exchange_for_listing") + private ExchangeListing exchangeForListing; + + public AddExchangeForListingResponse setExchangeForListing(ExchangeListing exchangeForListing) { + this.exchangeForListing = exchangeForListing; + return this; + } + + public ExchangeListing getExchangeForListing() { + return exchangeForListing; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AddExchangeForListingResponse that = (AddExchangeForListingResponse) o; + return Objects.equals(exchangeForListing, that.exchangeForListing); + } + + @Override + public int hashCode() { + return Objects.hash(exchangeForListing); + } + + @Override + public String toString() { + return new ToStringer(AddExchangeForListingResponse.class) + .add("exchangeForListing", exchangeForListing) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AssetType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AssetType.java new file mode 100755 index 000000000..f77ffde80 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/AssetType.java @@ -0,0 +1,15 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum AssetType { + ASSET_TYPE_DATA_TABLE, + ASSET_TYPE_GIT_REPO, + ASSET_TYPE_MEDIA, + ASSET_TYPE_MODEL, + ASSET_TYPE_NOTEBOOK, + ASSET_TYPE_UNSPECIFIED, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/Category.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/Category.java new file mode 100755 index 000000000..9aa94d271 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/Category.java @@ -0,0 +1,31 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum Category { + ADVERTISING_AND_MARKETING, + CLIMATE_AND_ENVIRONMENT, + COMMERCE, + DEMOGRAPHICS, + ECONOMICS, + EDUCATION, + ENERGY, + FINANCIAL, + GAMING, + GEOSPATIAL, + HEALTH, + LOOKUP_TABLES, + MANUFACTURING, + MEDIA, + OTHER, + PUBLIC_SECTOR, + RETAIL, + SCIENCE_AND_RESEARCH, + SECURITY, + SPORTS, + TRANSPORTATION_AND_LOGISTICS, + TRAVEL_AND_TOURISM, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerFulfillmentsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerFulfillmentsAPI.java new file mode 100755 index 000000000..bfb94cfdc --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerFulfillmentsAPI.java @@ -0,0 +1,79 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** Fulfillments are entities that allow consumers to preview installations. */ +@Generated +public class ConsumerFulfillmentsAPI { + private static final Logger LOG = LoggerFactory.getLogger(ConsumerFulfillmentsAPI.class); + + private final ConsumerFulfillmentsService impl; + + /** Regular-use constructor */ + public ConsumerFulfillmentsAPI(ApiClient apiClient) { + impl = new ConsumerFulfillmentsImpl(apiClient); + } + + /** Constructor for mocks */ + public ConsumerFulfillmentsAPI(ConsumerFulfillmentsService mock) { + impl = mock; + } + + public Iterable get(String listingId) { + return get(new GetListingContentMetadataRequest().setListingId(listingId)); + } + + /** + * Get listing content metadata. + * + *

Get a high level preview of the metadata of listing installable content. + */ + public Iterable get(GetListingContentMetadataRequest request) { + return new Paginator<>( + request, + impl::get, + GetListingContentMetadataResponse::getSharedDataObjects, + response -> { + String token = response.getNextPageToken(); + if (token == null) { + return null; + } + return request.setPageToken(token); + }); + } + + public Iterable list(String listingId) { + return list(new ListFulfillmentsRequest().setListingId(listingId)); + } + + /** + * List all listing fulfillments. + * + *

Get all listings fulfillments associated with a listing. A _fulfillment_ is a potential + * installation. Standard installations contain metadata about the attached share or git repo. + * Only one of these fields will be present. Personalized installations contain metadata about the + * attached share or git repo, as well as the Delta Sharing recipient type. + */ + public Iterable list(ListFulfillmentsRequest request) { + return new Paginator<>( + request, + impl::list, + ListFulfillmentsResponse::getFulfillments, + response -> { + String token = response.getNextPageToken(); + if (token == null) { + return null; + } + return request.setPageToken(token); + }); + } + + public ConsumerFulfillmentsService impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerFulfillmentsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerFulfillmentsImpl.java new file mode 100755 index 000000000..98c870847 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerFulfillmentsImpl.java @@ -0,0 +1,36 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import java.util.HashMap; +import java.util.Map; + +/** Package-local implementation of ConsumerFulfillments */ +@Generated +class ConsumerFulfillmentsImpl implements ConsumerFulfillmentsService { + private final ApiClient apiClient; + + public ConsumerFulfillmentsImpl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public GetListingContentMetadataResponse get(GetListingContentMetadataRequest request) { + String path = + String.format("/api/2.1/marketplace-consumer/listings/%s/content", request.getListingId()); + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + return apiClient.GET(path, request, GetListingContentMetadataResponse.class, headers); + } + + @Override + public ListFulfillmentsResponse list(ListFulfillmentsRequest request) { + String path = + String.format( + "/api/2.1/marketplace-consumer/listings/%s/fulfillments", request.getListingId()); + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + return apiClient.GET(path, request, ListFulfillmentsResponse.class, headers); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerFulfillmentsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerFulfillmentsService.java new file mode 100755 index 000000000..0a7737bd6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerFulfillmentsService.java @@ -0,0 +1,32 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; + +/** + * Fulfillments are entities that allow consumers to preview installations. + * + *

This is the high-level interface, that contains generated methods. + * + *

Evolving: this interface is under development. Method signatures may change. + */ +@Generated +public interface ConsumerFulfillmentsService { + /** + * Get listing content metadata. + * + *

Get a high level preview of the metadata of listing installable content. + */ + GetListingContentMetadataResponse get( + GetListingContentMetadataRequest getListingContentMetadataRequest); + + /** + * List all listing fulfillments. + * + *

Get all listings fulfillments associated with a listing. A _fulfillment_ is a potential + * installation. Standard installations contain metadata about the attached share or git repo. + * Only one of these fields will be present. Personalized installations contain metadata about the + * attached share or git repo, as well as the Delta Sharing recipient type. + */ + ListFulfillmentsResponse list(ListFulfillmentsRequest listFulfillmentsRequest); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerInstallationsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerInstallationsAPI.java new file mode 100755 index 000000000..4cb983d47 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerInstallationsAPI.java @@ -0,0 +1,122 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Installations are entities that allow consumers to interact with Databricks Marketplace listings. + */ +@Generated +public class ConsumerInstallationsAPI { + private static final Logger LOG = LoggerFactory.getLogger(ConsumerInstallationsAPI.class); + + private final ConsumerInstallationsService impl; + + /** Regular-use constructor */ + public ConsumerInstallationsAPI(ApiClient apiClient) { + impl = new ConsumerInstallationsImpl(apiClient); + } + + /** Constructor for mocks */ + public ConsumerInstallationsAPI(ConsumerInstallationsService mock) { + impl = mock; + } + + public Installation create(String listingId) { + return create(new CreateInstallationRequest().setListingId(listingId)); + } + + /** + * Install from a listing. + * + *

Install payload associated with a Databricks Marketplace listing. + */ + public Installation create(CreateInstallationRequest request) { + return impl.create(request); + } + + public void delete(String listingId, String installationId) { + delete( + new DeleteInstallationRequest().setListingId(listingId).setInstallationId(installationId)); + } + + /** + * Uninstall from a listing. + * + *

Uninstall an installation associated with a Databricks Marketplace listing. + */ + public void delete(DeleteInstallationRequest request) { + impl.delete(request); + } + + /** + * List all installations. + * + *

List all installations across all listings. + */ + public Iterable list(ListAllInstallationsRequest request) { + return new Paginator<>( + request, + impl::list, + ListAllInstallationsResponse::getInstallations, + response -> { + String token = response.getNextPageToken(); + if (token == null) { + return null; + } + return request.setPageToken(token); + }); + } + + public Iterable listListingInstallations(String listingId) { + return listListingInstallations(new ListInstallationsRequest().setListingId(listingId)); + } + + /** + * List installations for a listing. + * + *

List all installations for a particular listing. + */ + public Iterable listListingInstallations(ListInstallationsRequest request) { + return new Paginator<>( + request, + impl::listListingInstallations, + ListInstallationsResponse::getInstallations, + response -> { + String token = response.getNextPageToken(); + if (token == null) { + return null; + } + return request.setPageToken(token); + }); + } + + public UpdateInstallationResponse update( + String listingId, String installationId, InstallationDetail installation) { + return update( + new UpdateInstallationRequest() + .setListingId(listingId) + .setInstallationId(installationId) + .setInstallation(installation)); + } + + /** + * Update an installation. + * + *

This is a update API that will update the part of the fields defined in the installation + * table as well as interact with external services according to the fields not included in the + * installation table 1. the token will be rotate if the rotateToken flag is true 2. the token + * will be forcibly rotate if the rotateToken flag is true and the tokenInfo field is empty + */ + public UpdateInstallationResponse update(UpdateInstallationRequest request) { + return impl.update(request); + } + + public ConsumerInstallationsService impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerInstallationsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerInstallationsImpl.java new file mode 100755 index 000000000..6a8f91809 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerInstallationsImpl.java @@ -0,0 +1,69 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import java.util.HashMap; +import java.util.Map; + +/** Package-local implementation of ConsumerInstallations */ +@Generated +class ConsumerInstallationsImpl implements ConsumerInstallationsService { + private final ApiClient apiClient; + + public ConsumerInstallationsImpl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public Installation create(CreateInstallationRequest request) { + String path = + String.format( + "/api/2.1/marketplace-consumer/listings/%s/installations", request.getListingId()); + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + headers.put("Content-Type", "application/json"); + return apiClient.POST(path, request, Installation.class, headers); + } + + @Override + public void delete(DeleteInstallationRequest request) { + String path = + String.format( + "/api/2.1/marketplace-consumer/listings/%s/installations/%s", + request.getListingId(), request.getInstallationId()); + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + apiClient.DELETE(path, request, DeleteInstallationResponse.class, headers); + } + + @Override + public ListAllInstallationsResponse list(ListAllInstallationsRequest request) { + String path = "/api/2.1/marketplace-consumer/installations"; + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + return apiClient.GET(path, request, ListAllInstallationsResponse.class, headers); + } + + @Override + public ListInstallationsResponse listListingInstallations(ListInstallationsRequest request) { + String path = + String.format( + "/api/2.1/marketplace-consumer/listings/%s/installations", request.getListingId()); + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + return apiClient.GET(path, request, ListInstallationsResponse.class, headers); + } + + @Override + public 
UpdateInstallationResponse update(UpdateInstallationRequest request) { + String path = + String.format( + "/api/2.1/marketplace-consumer/listings/%s/installations/%s", + request.getListingId(), request.getInstallationId()); + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + headers.put("Content-Type", "application/json"); + return apiClient.PUT(path, request, UpdateInstallationResponse.class, headers); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerInstallationsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerInstallationsService.java new file mode 100755 index 000000000..0b94c9a25 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerInstallationsService.java @@ -0,0 +1,53 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; + +/** + * Installations are entities that allow consumers to interact with Databricks Marketplace listings. + * + *

This is the high-level interface that contains generated methods. + * + *

Evolving: this interface is under development. Method signatures may change. + */ +@Generated +public interface ConsumerInstallationsService { + /** + * Install from a listing. + * + *

Install payload associated with a Databricks Marketplace listing. + */ + Installation create(CreateInstallationRequest createInstallationRequest); + + /** + * Uninstall from a listing. + * + *

Uninstall an installation associated with a Databricks Marketplace listing. + */ + void delete(DeleteInstallationRequest deleteInstallationRequest); + + /** + * List all installations. + * + *

List all installations across all listings. + */ + ListAllInstallationsResponse list(ListAllInstallationsRequest listAllInstallationsRequest); + + /** + * List installations for a listing. + * + *

List all installations for a particular listing. + */ + ListInstallationsResponse listListingInstallations( + ListInstallationsRequest listInstallationsRequest); + + /** + * Update an installation. + * + *

This is a update API that will update the part of the fields defined in the installation + * table as well as interact with external services according to the fields not included in the + * installation table 1. the token will be rotate if the rotateToken flag is true 2. the token + * will be forcibly rotate if the rotateToken flag is true and the tokenInfo field is empty + */ + UpdateInstallationResponse update(UpdateInstallationRequest updateInstallationRequest); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerListingsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerListingsAPI.java new file mode 100755 index 000000000..ef71edbf4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerListingsAPI.java @@ -0,0 +1,89 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Listings are the core entities in the Marketplace. They represent the products that are available + * for consumption. + */ +@Generated +public class ConsumerListingsAPI { + private static final Logger LOG = LoggerFactory.getLogger(ConsumerListingsAPI.class); + + private final ConsumerListingsService impl; + + /** Regular-use constructor */ + public ConsumerListingsAPI(ApiClient apiClient) { + impl = new ConsumerListingsImpl(apiClient); + } + + /** Constructor for mocks */ + public ConsumerListingsAPI(ConsumerListingsService mock) { + impl = mock; + } + + public GetListingResponse get(String id) { + return get(new GetListingRequest().setId(id)); + } + + /** + * Get listing. + * + *

Get a published listing in the Databricks Marketplace that the consumer has access to. + */ + public GetListingResponse get(GetListingRequest request) { + return impl.get(request); + } + + /** + * List listings. + * + *

List all published listings in the Databricks Marketplace that the consumer has access to. + */ + public Iterable

list(ListListingsRequest request) { + return new Paginator<>( + request, + impl::list, + ListListingsResponse::getListings, + response -> { + String token = response.getNextPageToken(); + if (token == null) { + return null; + } + return request.setPageToken(token); + }); + } + + public Iterable search(String query) { + return search(new SearchListingsRequest().setQuery(query)); + } + + /** + * Search listings. + * + *

Search published listings in the Databricks Marketplace that the consumer has access to. + * This query supports a variety of different search parameters and performs fuzzy matching. + */ + public Iterable

search(SearchListingsRequest request) { + return new Paginator<>( + request, + impl::search, + SearchListingsResponse::getListings, + response -> { + String token = response.getNextPageToken(); + if (token == null) { + return null; + } + return request.setPageToken(token); + }); + } + + public ConsumerListingsService impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerListingsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerListingsImpl.java new file mode 100755 index 000000000..d930c0631 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerListingsImpl.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import java.util.HashMap; +import java.util.Map; + +/** Package-local implementation of ConsumerListings */ +@Generated +class ConsumerListingsImpl implements ConsumerListingsService { + private final ApiClient apiClient; + + public ConsumerListingsImpl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public GetListingResponse get(GetListingRequest request) { + String path = String.format("/api/2.1/marketplace-consumer/listings/%s", request.getId()); + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + return apiClient.GET(path, request, GetListingResponse.class, headers); + } + + @Override + public ListListingsResponse list(ListListingsRequest request) { + String path = "/api/2.1/marketplace-consumer/listings"; + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + return apiClient.GET(path, request, ListListingsResponse.class, headers); + } + + @Override + public SearchListingsResponse search(SearchListingsRequest request) { + String path 
= "/api/2.1/marketplace-consumer/search-listings"; + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + return apiClient.GET(path, request, SearchListingsResponse.class, headers); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerListingsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerListingsService.java new file mode 100755 index 000000000..d26e8e174 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerListingsService.java @@ -0,0 +1,37 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; + +/** + * Listings are the core entities in the Marketplace. They represent the products that are available + * for consumption. + * + *

This is the high-level interface that contains generated methods. + * + *

Evolving: this interface is under development. Method signatures may change. + */ +@Generated +public interface ConsumerListingsService { + /** + * Get listing. + * + *

Get a published listing in the Databricks Marketplace that the consumer has access to. + */ + GetListingResponse get(GetListingRequest getListingRequest); + + /** + * List listings. + * + *

List all published listings in the Databricks Marketplace that the consumer has access to. + */ + ListListingsResponse list(ListListingsRequest listListingsRequest); + + /** + * Search listings. + * + *

Search published listings in the Databricks Marketplace that the consumer has access to. + * This query supports a variety of different search parameters and performs fuzzy matching. + */ + SearchListingsResponse search(SearchListingsRequest searchListingsRequest); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerPersonalizationRequestsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerPersonalizationRequestsAPI.java new file mode 100755 index 000000000..a762f3e82 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerPersonalizationRequestsAPI.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Personalization Requests allow customers to interact with the individualized Marketplace listing + * flow. 
+ */ +@Generated +public class ConsumerPersonalizationRequestsAPI { + private static final Logger LOG = + LoggerFactory.getLogger(ConsumerPersonalizationRequestsAPI.class); + + private final ConsumerPersonalizationRequestsService impl; + + /** Regular-use constructor */ + public ConsumerPersonalizationRequestsAPI(ApiClient apiClient) { + impl = new ConsumerPersonalizationRequestsImpl(apiClient); + } + + /** Constructor for mocks */ + public ConsumerPersonalizationRequestsAPI(ConsumerPersonalizationRequestsService mock) { + impl = mock; + } + + public CreatePersonalizationRequestResponse create( + String listingId, String intendedUse, ConsumerTerms acceptedConsumerTerms) { + return create( + new CreatePersonalizationRequest() + .setListingId(listingId) + .setIntendedUse(intendedUse) + .setAcceptedConsumerTerms(acceptedConsumerTerms)); + } + + /** + * Create a personalization request. + * + *

Create a personalization request for a listing. + */ + public CreatePersonalizationRequestResponse create(CreatePersonalizationRequest request) { + return impl.create(request); + } + + public GetPersonalizationRequestResponse get(String listingId) { + return get(new GetPersonalizationRequestRequest().setListingId(listingId)); + } + + /** + * Get the personalization request for a listing. + * + *

Get the personalization request for a listing. Each consumer can make at *most* one + * personalization request for a listing. + */ + public GetPersonalizationRequestResponse get(GetPersonalizationRequestRequest request) { + return impl.get(request); + } + + /** + * List all personalization requests. + * + *

List personalization requests for a consumer across all listings. + */ + public Iterable list(ListAllPersonalizationRequestsRequest request) { + return new Paginator<>( + request, + impl::list, + ListAllPersonalizationRequestsResponse::getPersonalizationRequests, + response -> { + String token = response.getNextPageToken(); + if (token == null) { + return null; + } + return request.setPageToken(token); + }); + } + + public ConsumerPersonalizationRequestsService impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerPersonalizationRequestsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerPersonalizationRequestsImpl.java new file mode 100755 index 000000000..6f35542a7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerPersonalizationRequestsImpl.java @@ -0,0 +1,49 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import java.util.HashMap; +import java.util.Map; + +/** Package-local implementation of ConsumerPersonalizationRequests */ +@Generated +class ConsumerPersonalizationRequestsImpl implements ConsumerPersonalizationRequestsService { + private final ApiClient apiClient; + + public ConsumerPersonalizationRequestsImpl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public CreatePersonalizationRequestResponse create(CreatePersonalizationRequest request) { + String path = + String.format( + "/api/2.1/marketplace-consumer/listings/%s/personalization-requests", + request.getListingId()); + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + headers.put("Content-Type", "application/json"); + return apiClient.POST(path, request, CreatePersonalizationRequestResponse.class, headers); + } + + @Override + public GetPersonalizationRequestResponse get(GetPersonalizationRequestRequest request) { + String path = + String.format( + "/api/2.1/marketplace-consumer/listings/%s/personalization-requests", + request.getListingId()); + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + return apiClient.GET(path, request, GetPersonalizationRequestResponse.class, headers); + } + + @Override + public ListAllPersonalizationRequestsResponse list( + ListAllPersonalizationRequestsRequest request) { + String path = "/api/2.1/marketplace-consumer/personalization-requests"; + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + return apiClient.GET(path, request, ListAllPersonalizationRequestsResponse.class, headers); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerPersonalizationRequestsService.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerPersonalizationRequestsService.java new file mode 100755 index 000000000..076d75c1c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerPersonalizationRequestsService.java @@ -0,0 +1,40 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; + +/** + * Personalization Requests allow customers to interact with the individualized Marketplace listing + * flow. + * + *

This is the high-level interface that contains generated methods. + * + *

Evolving: this interface is under development. Method signatures may change. + */ +@Generated +public interface ConsumerPersonalizationRequestsService { + /** + * Create a personalization request. + * + *

Create a personalization request for a listing. + */ + CreatePersonalizationRequestResponse create( + CreatePersonalizationRequest createPersonalizationRequest); + + /** + * Get the personalization request for a listing. + * + *

Get the personalization request for a listing. Each consumer can make at *most* one + * personalization request for a listing. + */ + GetPersonalizationRequestResponse get( + GetPersonalizationRequestRequest getPersonalizationRequestRequest); + + /** + * List all personalization requests. + * + *

List personalization requests for a consumer across all listings. + */ + ListAllPersonalizationRequestsResponse list( + ListAllPersonalizationRequestsRequest listAllPersonalizationRequestsRequest); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerProvidersAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerProvidersAPI.java new file mode 100755 index 000000000..1b586c5f0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerProvidersAPI.java @@ -0,0 +1,62 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** Providers are the entities that publish listings to the Marketplace. */ +@Generated +public class ConsumerProvidersAPI { + private static final Logger LOG = LoggerFactory.getLogger(ConsumerProvidersAPI.class); + + private final ConsumerProvidersService impl; + + /** Regular-use constructor */ + public ConsumerProvidersAPI(ApiClient apiClient) { + impl = new ConsumerProvidersImpl(apiClient); + } + + /** Constructor for mocks */ + public ConsumerProvidersAPI(ConsumerProvidersService mock) { + impl = mock; + } + + public GetProviderResponse get(String id) { + return get(new GetProviderRequest().setId(id)); + } + + /** + * Get a provider. + * + *

Get a provider in the Databricks Marketplace with at least one visible listing. + */ + public GetProviderResponse get(GetProviderRequest request) { + return impl.get(request); + } + + /** + * List providers. + * + *

List all providers in the Databricks Marketplace with at least one visible listing. + */ + public Iterable list(ListProvidersRequest request) { + return new Paginator<>( + request, + impl::list, + ListProvidersResponse::getProviders, + response -> { + String token = response.getNextPageToken(); + if (token == null) { + return null; + } + return request.setPageToken(token); + }); + } + + public ConsumerProvidersService impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerProvidersImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerProvidersImpl.java new file mode 100755 index 000000000..261557fdd --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerProvidersImpl.java @@ -0,0 +1,33 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import java.util.HashMap; +import java.util.Map; + +/** Package-local implementation of ConsumerProviders */ +@Generated +class ConsumerProvidersImpl implements ConsumerProvidersService { + private final ApiClient apiClient; + + public ConsumerProvidersImpl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public GetProviderResponse get(GetProviderRequest request) { + String path = String.format("/api/2.1/marketplace-consumer/providers/%s", request.getId()); + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + return apiClient.GET(path, request, GetProviderResponse.class, headers); + } + + @Override + public ListProvidersResponse list(ListProvidersRequest request) { + String path = "/api/2.1/marketplace-consumer/providers"; + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + return apiClient.GET(path, request, 
ListProvidersResponse.class, headers); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerProvidersService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerProvidersService.java new file mode 100755 index 000000000..2f78fd91c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerProvidersService.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; + +/** + * Providers are the entities that publish listings to the Marketplace. + * + *

This is the high-level interface that contains generated methods. + * + *

Evolving: this interface is under development. Method signatures may change. + */ +@Generated +public interface ConsumerProvidersService { + /** + * Get a provider. + * + *

Get a provider in the Databricks Marketplace with at least one visible listing. + */ + GetProviderResponse get(GetProviderRequest getProviderRequest); + + /** + * List providers. + * + *

List all providers in the Databricks Marketplace with at least one visible listing. + */ + ListProvidersResponse list(ListProvidersRequest listProvidersRequest); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ComputeSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerTerms.java similarity index 50% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ComputeSpec.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerTerms.java index f38ca517b..4be619f52 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ComputeSpec.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ConsumerTerms.java @@ -1,6 +1,6 @@ // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. -package com.databricks.sdk.service.compute; +package com.databricks.sdk.service.marketplace; import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; @@ -8,35 +8,35 @@ import java.util.Objects; @Generated -public class ComputeSpec { - /** The kind of compute described by this compute specification. 
*/ - @JsonProperty("kind") - private ComputeSpecKind kind; +public class ConsumerTerms { + /** */ + @JsonProperty("version") + private String version; - public ComputeSpec setKind(ComputeSpecKind kind) { - this.kind = kind; + public ConsumerTerms setVersion(String version) { + this.version = version; return this; } - public ComputeSpecKind getKind() { - return kind; + public String getVersion() { + return version; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - ComputeSpec that = (ComputeSpec) o; - return Objects.equals(kind, that.kind); + ConsumerTerms that = (ConsumerTerms) o; + return Objects.equals(version, that.version); } @Override public int hashCode() { - return Objects.hash(kind); + return Objects.hash(version); } @Override public String toString() { - return new ToStringer(ComputeSpec.class).add("kind", kind).toString(); + return new ToStringer(ConsumerTerms.class).add("version", version).toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ContactInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ContactInfo.java new file mode 100755 index 000000000..e0e4135e6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ContactInfo.java @@ -0,0 +1,90 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +/** contact info for the consumer requesting data or performing a listing installation */ +@Generated +public class ContactInfo { + /** */ + @JsonProperty("company") + private String company; + + /** */ + @JsonProperty("email") + private String email; + + /** */ + @JsonProperty("first_name") + private String firstName; + + /** */ + @JsonProperty("last_name") + private String lastName; + + public ContactInfo setCompany(String company) { + this.company = company; + return this; + } + + public String getCompany() { + return company; + } + + public ContactInfo setEmail(String email) { + this.email = email; + return this; + } + + public String getEmail() { + return email; + } + + public ContactInfo setFirstName(String firstName) { + this.firstName = firstName; + return this; + } + + public String getFirstName() { + return firstName; + } + + public ContactInfo setLastName(String lastName) { + this.lastName = lastName; + return this; + } + + public String getLastName() { + return lastName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ContactInfo that = (ContactInfo) o; + return Objects.equals(company, that.company) + && Objects.equals(email, that.email) + && Objects.equals(firstName, that.firstName) + && Objects.equals(lastName, that.lastName); + } + + @Override + public int hashCode() { + return Objects.hash(company, email, firstName, lastName); + } + + @Override + public String toString() { + return new ToStringer(ContactInfo.class) + .add("company", company) + .add("email", email) + .add("firstName", firstName) + .add("lastName", lastName) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/Cost.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/Cost.java new file mode 100755 index 000000000..ca81d42dc --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/Cost.java @@ -0,0 +1,11 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum Cost { + FREE, + PAID, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateExchangeFilterRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateExchangeFilterRequest.java new file mode 100755 index 000000000..964a88910 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateExchangeFilterRequest.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class CreateExchangeFilterRequest { + /** */ + @JsonProperty("filter") + private ExchangeFilter filter; + + public CreateExchangeFilterRequest setFilter(ExchangeFilter filter) { + this.filter = filter; + return this; + } + + public ExchangeFilter getFilter() { + return filter; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateExchangeFilterRequest that = (CreateExchangeFilterRequest) o; + return Objects.equals(filter, that.filter); + } + + @Override + public int hashCode() { + return Objects.hash(filter); + } + + @Override + public String toString() { + return new ToStringer(CreateExchangeFilterRequest.class).add("filter", filter).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateExchangeFilterResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateExchangeFilterResponse.java new file mode 100755 index 000000000..152a5baa4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateExchangeFilterResponse.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class CreateExchangeFilterResponse { + /** */ + @JsonProperty("filter_id") + private String filterId; + + public CreateExchangeFilterResponse setFilterId(String filterId) { + this.filterId = filterId; + return this; + } + + public String getFilterId() { + return filterId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateExchangeFilterResponse that = (CreateExchangeFilterResponse) o; + return Objects.equals(filterId, that.filterId); + } + + @Override + public int hashCode() { + return Objects.hash(filterId); + } + + @Override + public String toString() { + return new ToStringer(CreateExchangeFilterResponse.class).add("filterId", filterId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateExchangeRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateExchangeRequest.java new file mode 100755 index 000000000..09e19edee --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateExchangeRequest.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class CreateExchangeRequest { + /** */ + @JsonProperty("exchange") + private Exchange exchange; + + public CreateExchangeRequest setExchange(Exchange exchange) { + this.exchange = exchange; + return this; + } + + public Exchange getExchange() { + return exchange; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateExchangeRequest that = (CreateExchangeRequest) o; + return Objects.equals(exchange, that.exchange); + } + + @Override + public int hashCode() { + return Objects.hash(exchange); + } + + @Override + public String toString() { + return new ToStringer(CreateExchangeRequest.class).add("exchange", exchange).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateExchangeResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateExchangeResponse.java new file mode 100755 index 000000000..528bf3ff7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateExchangeResponse.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class CreateExchangeResponse { + /** */ + @JsonProperty("exchange_id") + private String exchangeId; + + public CreateExchangeResponse setExchangeId(String exchangeId) { + this.exchangeId = exchangeId; + return this; + } + + public String getExchangeId() { + return exchangeId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateExchangeResponse that = (CreateExchangeResponse) o; + return Objects.equals(exchangeId, that.exchangeId); + } + + @Override + public int hashCode() { + return Objects.hash(exchangeId); + } + + @Override + public String toString() { + return new ToStringer(CreateExchangeResponse.class).add("exchangeId", exchangeId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateFileRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateFileRequest.java new file mode 100755 index 000000000..886bbdefc --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateFileRequest.java @@ -0,0 +1,89 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
// ===== File: CreateFileRequest.java =====
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.marketplace;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

/** Request describing a marketplace file to create. */
@Generated
public class CreateFileRequest {
  /** */
  @JsonProperty("display_name")
  private String displayName;

  /** */
  @JsonProperty("file_parent")
  private FileParent fileParent;

  /** */
  @JsonProperty("marketplace_file_type")
  private MarketplaceFileType marketplaceFileType;

  /** */
  @JsonProperty("mime_type")
  private String mimeType;

  /** Sets the display name and returns this request for call chaining. */
  public CreateFileRequest setDisplayName(String displayName) {
    this.displayName = displayName;
    return this;
  }

  public String getDisplayName() {
    return displayName;
  }

  /** Sets the file parent and returns this request for call chaining. */
  public CreateFileRequest setFileParent(FileParent fileParent) {
    this.fileParent = fileParent;
    return this;
  }

  public FileParent getFileParent() {
    return fileParent;
  }

  /** Sets the marketplace file type and returns this request for call chaining. */
  public CreateFileRequest setMarketplaceFileType(MarketplaceFileType marketplaceFileType) {
    this.marketplaceFileType = marketplaceFileType;
    return this;
  }

  public MarketplaceFileType getMarketplaceFileType() {
    return marketplaceFileType;
  }

  /** Sets the MIME type and returns this request for call chaining. */
  public CreateFileRequest setMimeType(String mimeType) {
    this.mimeType = mimeType;
    return this;
  }

  public String getMimeType() {
    return mimeType;
  }

  @Override
  public boolean equals(Object o) {
    if (o == this) {
      return true;
    }
    if (o == null || o.getClass() != getClass()) {
      return false;
    }
    CreateFileRequest other = (CreateFileRequest) o;
    return Objects.equals(displayName, other.displayName)
        && Objects.equals(fileParent, other.fileParent)
        && Objects.equals(marketplaceFileType, other.marketplaceFileType)
        && Objects.equals(mimeType, other.mimeType);
  }

  @Override
  public int hashCode() {
    return Objects.hash(displayName, fileParent, marketplaceFileType, mimeType);
  }

  @Override
  public String toString() {
    ToStringer stringer = new ToStringer(CreateFileRequest.class);
    return stringer
        .add("displayName", displayName)
        .add("fileParent", fileParent)
        .add("marketplaceFileType", marketplaceFileType)
        .add("mimeType", mimeType)
        .toString();
  }
}

// ===== File: CreateFileResponse.java =====
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.marketplace;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

/** Response for a file-creation call: file metadata plus an upload URL. */
@Generated
public class CreateFileResponse {
  /** */
  @JsonProperty("file_info")
  private FileInfo fileInfo;

  /** Pre-signed POST URL to blob storage */
  @JsonProperty("upload_url")
  private String uploadUrl;

  /** Sets the file info and returns this response for call chaining. */
  public CreateFileResponse setFileInfo(FileInfo fileInfo) {
    this.fileInfo = fileInfo;
    return this;
  }

  public FileInfo getFileInfo() {
    return fileInfo;
  }

  /** Sets the upload URL and returns this response for call chaining. */
  public CreateFileResponse setUploadUrl(String uploadUrl) {
    this.uploadUrl = uploadUrl;
    return this;
  }

  public String getUploadUrl() {
    return uploadUrl;
  }

  @Override
  public boolean equals(Object o) {
    if (o == this) {
      return true;
    }
    if (o == null || o.getClass() != getClass()) {
      return false;
    }
    CreateFileResponse other = (CreateFileResponse) o;
    return Objects.equals(fileInfo, other.fileInfo) && Objects.equals(uploadUrl, other.uploadUrl);
  }

  @Override
  public int hashCode() {
    return Objects.hash(fileInfo, uploadUrl);
  }

  @Override
  public String toString() {
    ToStringer stringer = new ToStringer(CreateFileResponse.class);
    return stringer.add("fileInfo", fileInfo).add("uploadUrl", uploadUrl).toString();
  }
}

// ===== File: CreateInstallationRequest.java =====
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.marketplace;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

/** Request to install a marketplace listing into a workspace. */
@Generated
public class CreateInstallationRequest {
  /** */
  @JsonProperty("accepted_consumer_terms")
  private ConsumerTerms acceptedConsumerTerms;

  /** */
  @JsonProperty("catalog_name")
  private String catalogName;

  /** Path parameter — intentionally not serialized into the JSON body. */
  private String listingId;

  /** */
  @JsonProperty("recipient_type")
  private DeltaSharingRecipientType recipientType;

  /** for git repo installations */
  @JsonProperty("repo_detail")
  private RepoInstallation repoDetail;

  /** */
  @JsonProperty("share_name")
  private String shareName;

  /** Sets the accepted consumer terms and returns this request for call chaining. */
  public CreateInstallationRequest setAcceptedConsumerTerms(ConsumerTerms acceptedConsumerTerms) {
    this.acceptedConsumerTerms = acceptedConsumerTerms;
    return this;
  }

  public ConsumerTerms getAcceptedConsumerTerms() {
    return acceptedConsumerTerms;
  }

  /** Sets the catalog name and returns this request for call chaining. */
  public CreateInstallationRequest setCatalogName(String catalogName) {
    this.catalogName = catalogName;
    return this;
  }

  public String getCatalogName() {
    return catalogName;
  }

  /** Sets the listing id and returns this request for call chaining. */
  public CreateInstallationRequest setListingId(String listingId) {
    this.listingId = listingId;
    return this;
  }

  public String getListingId() {
    return listingId;
  }

  /** Sets the recipient type and returns this request for call chaining. */
  public CreateInstallationRequest setRecipientType(DeltaSharingRecipientType recipientType) {
    this.recipientType = recipientType;
    return this;
  }

  public DeltaSharingRecipientType getRecipientType() {
    return recipientType;
  }

  /** Sets the repo detail and returns this request for call chaining. */
  public CreateInstallationRequest setRepoDetail(RepoInstallation repoDetail) {
    this.repoDetail = repoDetail;
    return this;
  }

  public RepoInstallation getRepoDetail() {
    return repoDetail;
  }

  /** Sets the share name and returns this request for call chaining. */
  public CreateInstallationRequest setShareName(String shareName) {
    this.shareName = shareName;
    return this;
  }

  public String getShareName() {
    return shareName;
  }

  @Override
  public boolean equals(Object o) {
    if (o == this) {
      return true;
    }
    if (o == null || o.getClass() != getClass()) {
      return false;
    }
    CreateInstallationRequest other = (CreateInstallationRequest) o;
    return Objects.equals(acceptedConsumerTerms, other.acceptedConsumerTerms)
        && Objects.equals(catalogName, other.catalogName)
        && Objects.equals(listingId, other.listingId)
        && Objects.equals(recipientType, other.recipientType)
        && Objects.equals(repoDetail, other.repoDetail)
        && Objects.equals(shareName, other.shareName);
  }

  @Override
  public int hashCode() {
    return Objects.hash(
        acceptedConsumerTerms, catalogName, listingId, recipientType, repoDetail, shareName);
  }

  @Override
  public String toString() {
    ToStringer stringer = new ToStringer(CreateInstallationRequest.class);
    return stringer
        .add("acceptedConsumerTerms", acceptedConsumerTerms)
        .add("catalogName", catalogName)
        .add("listingId", listingId)
        .add("recipientType", recipientType)
        .add("repoDetail", repoDetail)
        .add("shareName", shareName)
        .toString();
  }
}
OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class CreateListingRequest { + /** */ + @JsonProperty("listing") + private Listing listing; + + public CreateListingRequest setListing(Listing listing) { + this.listing = listing; + return this; + } + + public Listing getListing() { + return listing; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateListingRequest that = (CreateListingRequest) o; + return Objects.equals(listing, that.listing); + } + + @Override + public int hashCode() { + return Objects.hash(listing); + } + + @Override + public String toString() { + return new ToStringer(CreateListingRequest.class).add("listing", listing).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateListingResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateListingResponse.java new file mode 100755 index 000000000..39814421b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateListingResponse.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class CreateListingResponse { + /** */ + @JsonProperty("listing_id") + private String listingId; + + public CreateListingResponse setListingId(String listingId) { + this.listingId = listingId; + return this; + } + + public String getListingId() { + return listingId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateListingResponse that = (CreateListingResponse) o; + return Objects.equals(listingId, that.listingId); + } + + @Override + public int hashCode() { + return Objects.hash(listingId); + } + + @Override + public String toString() { + return new ToStringer(CreateListingResponse.class).add("listingId", listingId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreatePersonalizationRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreatePersonalizationRequest.java new file mode 100755 index 000000000..764db7066 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreatePersonalizationRequest.java @@ -0,0 +1,174 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
// ===== File: CreatePersonalizationRequest.java =====
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.marketplace;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

/** Data request messages also creates a lead (maybe) */
@Generated
public class CreatePersonalizationRequest {
  /** */
  @JsonProperty("accepted_consumer_terms")
  private ConsumerTerms acceptedConsumerTerms;

  /** */
  @JsonProperty("comment")
  private String comment;

  /** */
  @JsonProperty("company")
  private String company;

  /** */
  @JsonProperty("first_name")
  private String firstName;

  /** */
  @JsonProperty("intended_use")
  private String intendedUse;

  /** */
  @JsonProperty("is_from_lighthouse")
  private Boolean isFromLighthouse;

  /** */
  @JsonProperty("last_name")
  private String lastName;

  /** Path parameter — intentionally not serialized into the JSON body. */
  private String listingId;

  /** */
  @JsonProperty("recipient_type")
  private DeltaSharingRecipientType recipientType;

  /** Sets the accepted consumer terms and returns this request for call chaining. */
  public CreatePersonalizationRequest setAcceptedConsumerTerms(
      ConsumerTerms acceptedConsumerTerms) {
    this.acceptedConsumerTerms = acceptedConsumerTerms;
    return this;
  }

  public ConsumerTerms getAcceptedConsumerTerms() {
    return acceptedConsumerTerms;
  }

  /** Sets the comment and returns this request for call chaining. */
  public CreatePersonalizationRequest setComment(String comment) {
    this.comment = comment;
    return this;
  }

  public String getComment() {
    return comment;
  }

  /** Sets the company and returns this request for call chaining. */
  public CreatePersonalizationRequest setCompany(String company) {
    this.company = company;
    return this;
  }

  public String getCompany() {
    return company;
  }

  /** Sets the first name and returns this request for call chaining. */
  public CreatePersonalizationRequest setFirstName(String firstName) {
    this.firstName = firstName;
    return this;
  }

  public String getFirstName() {
    return firstName;
  }

  /** Sets the intended use and returns this request for call chaining. */
  public CreatePersonalizationRequest setIntendedUse(String intendedUse) {
    this.intendedUse = intendedUse;
    return this;
  }

  public String getIntendedUse() {
    return intendedUse;
  }

  /** Sets the lighthouse flag and returns this request for call chaining. */
  public CreatePersonalizationRequest setIsFromLighthouse(Boolean isFromLighthouse) {
    this.isFromLighthouse = isFromLighthouse;
    return this;
  }

  public Boolean getIsFromLighthouse() {
    return isFromLighthouse;
  }

  /** Sets the last name and returns this request for call chaining. */
  public CreatePersonalizationRequest setLastName(String lastName) {
    this.lastName = lastName;
    return this;
  }

  public String getLastName() {
    return lastName;
  }

  /** Sets the listing id and returns this request for call chaining. */
  public CreatePersonalizationRequest setListingId(String listingId) {
    this.listingId = listingId;
    return this;
  }

  public String getListingId() {
    return listingId;
  }

  /** Sets the recipient type and returns this request for call chaining. */
  public CreatePersonalizationRequest setRecipientType(DeltaSharingRecipientType recipientType) {
    this.recipientType = recipientType;
    return this;
  }

  public DeltaSharingRecipientType getRecipientType() {
    return recipientType;
  }

  @Override
  public boolean equals(Object o) {
    if (o == this) {
      return true;
    }
    if (o == null || o.getClass() != getClass()) {
      return false;
    }
    CreatePersonalizationRequest other = (CreatePersonalizationRequest) o;
    return Objects.equals(acceptedConsumerTerms, other.acceptedConsumerTerms)
        && Objects.equals(comment, other.comment)
        && Objects.equals(company, other.company)
        && Objects.equals(firstName, other.firstName)
        && Objects.equals(intendedUse, other.intendedUse)
        && Objects.equals(isFromLighthouse, other.isFromLighthouse)
        && Objects.equals(lastName, other.lastName)
        && Objects.equals(listingId, other.listingId)
        && Objects.equals(recipientType, other.recipientType);
  }

  @Override
  public int hashCode() {
    return Objects.hash(
        acceptedConsumerTerms,
        comment,
        company,
        firstName,
        intendedUse,
        isFromLighthouse,
        lastName,
        listingId,
        recipientType);
  }

  @Override
  public String toString() {
    ToStringer stringer = new ToStringer(CreatePersonalizationRequest.class);
    return stringer
        .add("acceptedConsumerTerms", acceptedConsumerTerms)
        .add("comment", comment)
        .add("company", company)
        .add("firstName", firstName)
        .add("intendedUse", intendedUse)
        .add("isFromLighthouse", isFromLighthouse)
        .add("lastName", lastName)
        .add("listingId", listingId)
        .add("recipientType", recipientType)
        .toString();
  }
}

// ===== File: CreatePersonalizationRequestResponse.java =====
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.marketplace;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

/** Response carrying the id of a newly created personalization request. */
@Generated
public class CreatePersonalizationRequestResponse {
  /** */
  @JsonProperty("id")
  private String id;

  /** Sets the id and returns this response for call chaining. */
  public CreatePersonalizationRequestResponse setId(String id) {
    this.id = id;
    return this;
  }

  public String getId() {
    return id;
  }

  @Override
  public boolean equals(Object o) {
    if (o == this) {
      return true;
    }
    if (o == null || o.getClass() != getClass()) {
      return false;
    }
    CreatePersonalizationRequestResponse other = (CreatePersonalizationRequestResponse) o;
    return Objects.equals(id, other.id);
  }

  @Override
  public int hashCode() {
    return Objects.hash(id);
  }

  @Override
  public String toString() {
    ToStringer stringer = new ToStringer(CreatePersonalizationRequestResponse.class);
    return stringer.add("id", id).toString();
  }
}
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class CreateProviderRequest { + /** */ + @JsonProperty("provider") + private ProviderInfo provider; + + public CreateProviderRequest setProvider(ProviderInfo provider) { + this.provider = provider; + return this; + } + + public ProviderInfo getProvider() { + return provider; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateProviderRequest that = (CreateProviderRequest) o; + return Objects.equals(provider, that.provider); + } + + @Override + public int hashCode() { + return Objects.hash(provider); + } + + @Override + public String toString() { + return new ToStringer(CreateProviderRequest.class).add("provider", provider).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateProviderResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateProviderResponse.java new file mode 100755 index 000000000..7308ff060 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/CreateProviderResponse.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class CreateProviderResponse { + /** */ + @JsonProperty("id") + private String id; + + public CreateProviderResponse setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateProviderResponse that = (CreateProviderResponse) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(CreateProviderResponse.class).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DataRefresh.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DataRefresh.java new file mode 100755 index 000000000..f4d61be6c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DataRefresh.java @@ -0,0 +1,18 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
// ===== File: DataRefresh.java =====
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.marketplace;

import com.databricks.sdk.support.Generated;

/** Time unit for a data-refresh cadence. */
@Generated
public enum DataRefresh {
  DAILY,
  HOURLY,
  MINUTE,
  MONTHLY,
  NONE,
  QUARTERLY,
  SECOND,
  WEEKLY,
  YEARLY
}

// ===== File: DataRefreshInfo.java =====
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.marketplace;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

/** Refresh cadence expressed as an interval count plus a {@link DataRefresh} unit. */
@Generated
public class DataRefreshInfo {
  /** */
  @JsonProperty("interval")
  private Long interval;

  /** */
  @JsonProperty("unit")
  private DataRefresh unit;

  /** Sets the interval and returns this instance for call chaining. */
  public DataRefreshInfo setInterval(Long interval) {
    this.interval = interval;
    return this;
  }

  public Long getInterval() {
    return interval;
  }

  /** Sets the unit and returns this instance for call chaining. */
  public DataRefreshInfo setUnit(DataRefresh unit) {
    this.unit = unit;
    return this;
  }

  public DataRefresh getUnit() {
    return unit;
  }

  @Override
  public boolean equals(Object o) {
    if (o == this) {
      return true;
    }
    if (o == null || o.getClass() != getClass()) {
      return false;
    }
    DataRefreshInfo other = (DataRefreshInfo) o;
    return Objects.equals(interval, other.interval) && Objects.equals(unit, other.unit);
  }

  @Override
  public int hashCode() {
    return Objects.hash(interval, unit);
  }

  @Override
  public String toString() {
    ToStringer stringer = new ToStringer(DataRefreshInfo.class);
    return stringer.add("interval", interval).add("unit", unit).toString();
  }
}
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeFilterRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeFilterRequest.java new file mode 100755 index 000000000..a40208460 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeFilterRequest.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +/** Delete an exchange filter */ +@Generated +public class DeleteExchangeFilterRequest { + /** */ + private String id; + + public DeleteExchangeFilterRequest setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteExchangeFilterRequest that = (DeleteExchangeFilterRequest) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(DeleteExchangeFilterRequest.class).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeFilterResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeFilterResponse.java new file mode 100755 index 000000000..b7cca36ab --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeFilterResponse.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +public class DeleteExchangeFilterResponse { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteExchangeFilterResponse.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeRequest.java new file mode 100755 index 000000000..389f9a4c0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeRequest.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +/** Delete an exchange */ +@Generated +public class DeleteExchangeRequest { + /** */ + private String id; + + public DeleteExchangeRequest setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteExchangeRequest that = (DeleteExchangeRequest) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(DeleteExchangeRequest.class).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspaceAssignmentsUpdated.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeResponse.java similarity index 75% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspaceAssignmentsUpdated.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeResponse.java index 3abf5e71c..be2eb59ea 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/iam/WorkspaceAssignmentsUpdated.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteExchangeResponse.java @@ -1,13 +1,13 @@ // Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
-package com.databricks.sdk.service.iam; +package com.databricks.sdk.service.marketplace; import com.databricks.sdk.support.Generated; import com.databricks.sdk.support.ToStringer; import java.util.Objects; @Generated -public class WorkspaceAssignmentsUpdated { +public class DeleteExchangeResponse { @Override public boolean equals(Object o) { @@ -23,6 +23,6 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(WorkspaceAssignmentsUpdated.class).toString(); + return new ToStringer(DeleteExchangeResponse.class).toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteFileRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteFileRequest.java new file mode 100755 index 000000000..024fcb8e2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteFileRequest.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +/** Delete a file */ +@Generated +public class DeleteFileRequest { + /** */ + private String fileId; + + public DeleteFileRequest setFileId(String fileId) { + this.fileId = fileId; + return this; + } + + public String getFileId() { + return fileId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteFileRequest that = (DeleteFileRequest) o; + return Objects.equals(fileId, that.fileId); + } + + @Override + public int hashCode() { + return Objects.hash(fileId); + } + + @Override + public String toString() { + return new ToStringer(DeleteFileRequest.class).add("fileId", fileId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteFileResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteFileResponse.java new file mode 100755 index 000000000..f799f3705 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteFileResponse.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +public class DeleteFileResponse { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteFileResponse.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteInstallationRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteInstallationRequest.java new file mode 100755 index 000000000..f99ff7bcf --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteInstallationRequest.java @@ -0,0 +1,57 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +/** Uninstall from a listing */ +@Generated +public class DeleteInstallationRequest { + /** */ + private String installationId; + + /** */ + private String listingId; + + public DeleteInstallationRequest setInstallationId(String installationId) { + this.installationId = installationId; + return this; + } + + public String getInstallationId() { + return installationId; + } + + public DeleteInstallationRequest setListingId(String listingId) { + this.listingId = listingId; + return this; + } + + public String getListingId() { + return listingId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteInstallationRequest that = (DeleteInstallationRequest) o; + return Objects.equals(installationId, that.installationId) + && Objects.equals(listingId, that.listingId); + } + + @Override + public int hashCode() { + return Objects.hash(installationId, listingId); + } + + @Override + public String toString() { + return new ToStringer(DeleteInstallationRequest.class) + .add("installationId", installationId) + .add("listingId", listingId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteInstallationResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteInstallationResponse.java new file mode 100755 index 000000000..0928712ff --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteInstallationResponse.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +public class DeleteInstallationResponse { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteInstallationResponse.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteListingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteListingRequest.java new file mode 100755 index 000000000..fc3a8efe4 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteListingRequest.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +/** Delete a listing */ +@Generated +public class DeleteListingRequest { + /** */ + private String id; + + public DeleteListingRequest setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteListingRequest that = (DeleteListingRequest) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(DeleteListingRequest.class).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteListingResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteListingResponse.java new file mode 100755 index 000000000..1250054c3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteListingResponse.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +public class DeleteListingResponse { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteListingResponse.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteProviderRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteProviderRequest.java new file mode 100755 index 000000000..cd57b3c0e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteProviderRequest.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +/** Delete provider */ +@Generated +public class DeleteProviderRequest { + /** */ + private String id; + + public DeleteProviderRequest setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DeleteProviderRequest that = (DeleteProviderRequest) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(DeleteProviderRequest.class).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteProviderResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteProviderResponse.java new file mode 100755 index 000000000..41f94447f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeleteProviderResponse.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +public class DeleteProviderResponse { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(DeleteProviderResponse.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeltaSharingRecipientType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeltaSharingRecipientType.java new file mode 100755 index 000000000..c6be43522 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/DeltaSharingRecipientType.java @@ -0,0 +1,11 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum DeltaSharingRecipientType { + DELTA_SHARING_RECIPIENT_TYPE_DATABRICKS, + DELTA_SHARING_RECIPIENT_TYPE_OPEN, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/Exchange.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/Exchange.java new file mode 100755 index 000000000..ff0fb00ad --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/Exchange.java @@ -0,0 +1,166 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class Exchange { + /** */ + @JsonProperty("comment") + private String comment; + + /** */ + @JsonProperty("created_at") + private Long createdAt; + + /** */ + @JsonProperty("created_by") + private String createdBy; + + /** */ + @JsonProperty("filters") + private Collection filters; + + /** */ + @JsonProperty("id") + private String id; + + /** */ + @JsonProperty("linked_listings") + private Collection linkedListings; + + /** */ + @JsonProperty("name") + private String name; + + /** */ + @JsonProperty("updated_at") + private Long updatedAt; + + /** */ + @JsonProperty("updated_by") + private String updatedBy; + + public Exchange setComment(String comment) { + this.comment = comment; + return this; + } + + public String getComment() { + return comment; + } + + public Exchange setCreatedAt(Long createdAt) { + this.createdAt = createdAt; + return this; + } + + public Long getCreatedAt() { + return createdAt; + } + + public Exchange setCreatedBy(String createdBy) { + this.createdBy = createdBy; + return this; + } + + public String getCreatedBy() { + return createdBy; + } + + public Exchange setFilters(Collection filters) { + this.filters = filters; + return this; + } + + public Collection getFilters() { + return filters; + } + + public Exchange setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public Exchange setLinkedListings(Collection linkedListings) { + this.linkedListings = linkedListings; + return this; + } + + public Collection getLinkedListings() { + return linkedListings; + } + + public Exchange setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public Exchange 
setUpdatedAt(Long updatedAt) { + this.updatedAt = updatedAt; + return this; + } + + public Long getUpdatedAt() { + return updatedAt; + } + + public Exchange setUpdatedBy(String updatedBy) { + this.updatedBy = updatedBy; + return this; + } + + public String getUpdatedBy() { + return updatedBy; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Exchange that = (Exchange) o; + return Objects.equals(comment, that.comment) + && Objects.equals(createdAt, that.createdAt) + && Objects.equals(createdBy, that.createdBy) + && Objects.equals(filters, that.filters) + && Objects.equals(id, that.id) + && Objects.equals(linkedListings, that.linkedListings) + && Objects.equals(name, that.name) + && Objects.equals(updatedAt, that.updatedAt) + && Objects.equals(updatedBy, that.updatedBy); + } + + @Override + public int hashCode() { + return Objects.hash( + comment, createdAt, createdBy, filters, id, linkedListings, name, updatedAt, updatedBy); + } + + @Override + public String toString() { + return new ToStringer(Exchange.class) + .add("comment", comment) + .add("createdAt", createdAt) + .add("createdBy", createdBy) + .add("filters", filters) + .add("id", id) + .add("linkedListings", linkedListings) + .add("name", name) + .add("updatedAt", updatedAt) + .add("updatedBy", updatedBy) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ExchangeFilter.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ExchangeFilter.java new file mode 100755 index 000000000..85ddd4ac3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ExchangeFilter.java @@ -0,0 +1,165 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class ExchangeFilter { + /** */ + @JsonProperty("created_at") + private Long createdAt; + + /** */ + @JsonProperty("created_by") + private String createdBy; + + /** */ + @JsonProperty("exchange_id") + private String exchangeId; + + /** */ + @JsonProperty("filter_type") + private ExchangeFilterType filterType; + + /** */ + @JsonProperty("filter_value") + private String filterValue; + + /** */ + @JsonProperty("id") + private String id; + + /** */ + @JsonProperty("name") + private String name; + + /** */ + @JsonProperty("updated_at") + private Long updatedAt; + + /** */ + @JsonProperty("updated_by") + private String updatedBy; + + public ExchangeFilter setCreatedAt(Long createdAt) { + this.createdAt = createdAt; + return this; + } + + public Long getCreatedAt() { + return createdAt; + } + + public ExchangeFilter setCreatedBy(String createdBy) { + this.createdBy = createdBy; + return this; + } + + public String getCreatedBy() { + return createdBy; + } + + public ExchangeFilter setExchangeId(String exchangeId) { + this.exchangeId = exchangeId; + return this; + } + + public String getExchangeId() { + return exchangeId; + } + + public ExchangeFilter setFilterType(ExchangeFilterType filterType) { + this.filterType = filterType; + return this; + } + + public ExchangeFilterType getFilterType() { + return filterType; + } + + public ExchangeFilter setFilterValue(String filterValue) { + this.filterValue = filterValue; + return this; + } + + public String getFilterValue() { + return filterValue; + } + + public ExchangeFilter setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public ExchangeFilter setName(String name) { + this.name = name; + return this; + } + + public String 
getName() { + return name; + } + + public ExchangeFilter setUpdatedAt(Long updatedAt) { + this.updatedAt = updatedAt; + return this; + } + + public Long getUpdatedAt() { + return updatedAt; + } + + public ExchangeFilter setUpdatedBy(String updatedBy) { + this.updatedBy = updatedBy; + return this; + } + + public String getUpdatedBy() { + return updatedBy; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ExchangeFilter that = (ExchangeFilter) o; + return Objects.equals(createdAt, that.createdAt) + && Objects.equals(createdBy, that.createdBy) + && Objects.equals(exchangeId, that.exchangeId) + && Objects.equals(filterType, that.filterType) + && Objects.equals(filterValue, that.filterValue) + && Objects.equals(id, that.id) + && Objects.equals(name, that.name) + && Objects.equals(updatedAt, that.updatedAt) + && Objects.equals(updatedBy, that.updatedBy); + } + + @Override + public int hashCode() { + return Objects.hash( + createdAt, createdBy, exchangeId, filterType, filterValue, id, name, updatedAt, updatedBy); + } + + @Override + public String toString() { + return new ToStringer(ExchangeFilter.class) + .add("createdAt", createdAt) + .add("createdBy", createdBy) + .add("exchangeId", exchangeId) + .add("filterType", filterType) + .add("filterValue", filterValue) + .add("id", id) + .add("name", name) + .add("updatedAt", updatedAt) + .add("updatedBy", updatedBy) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ExchangeFilterType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ExchangeFilterType.java new file mode 100755 index 000000000..5a9107544 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ExchangeFilterType.java @@ -0,0 +1,10 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum ExchangeFilterType { + GLOBAL_METASTORE_ID, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ExchangeListing.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ExchangeListing.java new file mode 100755 index 000000000..d149724ff --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ExchangeListing.java @@ -0,0 +1,134 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class ExchangeListing { + /** */ + @JsonProperty("created_at") + private Long createdAt; + + /** */ + @JsonProperty("created_by") + private String createdBy; + + /** */ + @JsonProperty("exchange_id") + private String exchangeId; + + /** */ + @JsonProperty("exchange_name") + private String exchangeName; + + /** */ + @JsonProperty("id") + private String id; + + /** */ + @JsonProperty("listing_id") + private String listingId; + + /** */ + @JsonProperty("listing_name") + private String listingName; + + public ExchangeListing setCreatedAt(Long createdAt) { + this.createdAt = createdAt; + return this; + } + + public Long getCreatedAt() { + return createdAt; + } + + public ExchangeListing setCreatedBy(String createdBy) { + this.createdBy = createdBy; + return this; + } + + public String getCreatedBy() { + return createdBy; + } + + public ExchangeListing setExchangeId(String exchangeId) { + this.exchangeId = exchangeId; + return this; + } + + public String getExchangeId() { + return exchangeId; + } + + public ExchangeListing setExchangeName(String exchangeName) { + this.exchangeName = exchangeName; + 
return this; + } + + public String getExchangeName() { + return exchangeName; + } + + public ExchangeListing setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public ExchangeListing setListingId(String listingId) { + this.listingId = listingId; + return this; + } + + public String getListingId() { + return listingId; + } + + public ExchangeListing setListingName(String listingName) { + this.listingName = listingName; + return this; + } + + public String getListingName() { + return listingName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ExchangeListing that = (ExchangeListing) o; + return Objects.equals(createdAt, that.createdAt) + && Objects.equals(createdBy, that.createdBy) + && Objects.equals(exchangeId, that.exchangeId) + && Objects.equals(exchangeName, that.exchangeName) + && Objects.equals(id, that.id) + && Objects.equals(listingId, that.listingId) + && Objects.equals(listingName, that.listingName); + } + + @Override + public int hashCode() { + return Objects.hash(createdAt, createdBy, exchangeId, exchangeName, id, listingId, listingName); + } + + @Override + public String toString() { + return new ToStringer(ExchangeListing.class) + .add("createdAt", createdAt) + .add("createdBy", createdBy) + .add("exchangeId", exchangeId) + .add("exchangeName", exchangeName) + .add("id", id) + .add("listingId", listingId) + .add("listingName", listingName) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/FileInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/FileInfo.java new file mode 100755 index 000000000..4c47c50e1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/FileInfo.java @@ -0,0 +1,189 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class FileInfo { + /** */ + @JsonProperty("created_at") + private Long createdAt; + + /** Name displayed to users for applicable files, e.g. embedded notebooks */ + @JsonProperty("display_name") + private String displayName; + + /** */ + @JsonProperty("download_link") + private String downloadLink; + + /** */ + @JsonProperty("file_parent") + private FileParent fileParent; + + /** */ + @JsonProperty("id") + private String id; + + /** */ + @JsonProperty("marketplace_file_type") + private MarketplaceFileType marketplaceFileType; + + /** */ + @JsonProperty("mime_type") + private String mimeType; + + /** */ + @JsonProperty("status") + private FileStatus status; + + /** Populated if status is in a failed state with more information on reason for the failure. 
*/ + @JsonProperty("status_message") + private String statusMessage; + + /** */ + @JsonProperty("updated_at") + private Long updatedAt; + + public FileInfo setCreatedAt(Long createdAt) { + this.createdAt = createdAt; + return this; + } + + public Long getCreatedAt() { + return createdAt; + } + + public FileInfo setDisplayName(String displayName) { + this.displayName = displayName; + return this; + } + + public String getDisplayName() { + return displayName; + } + + public FileInfo setDownloadLink(String downloadLink) { + this.downloadLink = downloadLink; + return this; + } + + public String getDownloadLink() { + return downloadLink; + } + + public FileInfo setFileParent(FileParent fileParent) { + this.fileParent = fileParent; + return this; + } + + public FileParent getFileParent() { + return fileParent; + } + + public FileInfo setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public FileInfo setMarketplaceFileType(MarketplaceFileType marketplaceFileType) { + this.marketplaceFileType = marketplaceFileType; + return this; + } + + public MarketplaceFileType getMarketplaceFileType() { + return marketplaceFileType; + } + + public FileInfo setMimeType(String mimeType) { + this.mimeType = mimeType; + return this; + } + + public String getMimeType() { + return mimeType; + } + + public FileInfo setStatus(FileStatus status) { + this.status = status; + return this; + } + + public FileStatus getStatus() { + return status; + } + + public FileInfo setStatusMessage(String statusMessage) { + this.statusMessage = statusMessage; + return this; + } + + public String getStatusMessage() { + return statusMessage; + } + + public FileInfo setUpdatedAt(Long updatedAt) { + this.updatedAt = updatedAt; + return this; + } + + public Long getUpdatedAt() { + return updatedAt; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + FileInfo that = 
(FileInfo) o; + return Objects.equals(createdAt, that.createdAt) + && Objects.equals(displayName, that.displayName) + && Objects.equals(downloadLink, that.downloadLink) + && Objects.equals(fileParent, that.fileParent) + && Objects.equals(id, that.id) + && Objects.equals(marketplaceFileType, that.marketplaceFileType) + && Objects.equals(mimeType, that.mimeType) + && Objects.equals(status, that.status) + && Objects.equals(statusMessage, that.statusMessage) + && Objects.equals(updatedAt, that.updatedAt); + } + + @Override + public int hashCode() { + return Objects.hash( + createdAt, + displayName, + downloadLink, + fileParent, + id, + marketplaceFileType, + mimeType, + status, + statusMessage, + updatedAt); + } + + @Override + public String toString() { + return new ToStringer(FileInfo.class) + .add("createdAt", createdAt) + .add("displayName", displayName) + .add("downloadLink", downloadLink) + .add("fileParent", fileParent) + .add("id", id) + .add("marketplaceFileType", marketplaceFileType) + .add("mimeType", mimeType) + .add("status", status) + .add("statusMessage", statusMessage) + .add("updatedAt", updatedAt) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/FileParent.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/FileParent.java new file mode 100755 index 000000000..e2e2ddec5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/FileParent.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class FileParent { + /** */ + @JsonProperty("file_parent_type") + private FileParentType fileParentType; + + /** TODO make the following fields required */ + @JsonProperty("parent_id") + private String parentId; + + public FileParent setFileParentType(FileParentType fileParentType) { + this.fileParentType = fileParentType; + return this; + } + + public FileParentType getFileParentType() { + return fileParentType; + } + + public FileParent setParentId(String parentId) { + this.parentId = parentId; + return this; + } + + public String getParentId() { + return parentId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + FileParent that = (FileParent) o; + return Objects.equals(fileParentType, that.fileParentType) + && Objects.equals(parentId, that.parentId); + } + + @Override + public int hashCode() { + return Objects.hash(fileParentType, parentId); + } + + @Override + public String toString() { + return new ToStringer(FileParent.class) + .add("fileParentType", fileParentType) + .add("parentId", parentId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/FileParentType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/FileParentType.java new file mode 100755 index 000000000..55c6496f1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/FileParentType.java @@ -0,0 +1,11 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum FileParentType { + LISTING, + PROVIDER, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/FileStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/FileStatus.java new file mode 100755 index 000000000..e04bf342f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/FileStatus.java @@ -0,0 +1,13 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum FileStatus { + FILE_STATUS_PUBLISHED, + FILE_STATUS_SANITIZATION_FAILED, + FILE_STATUS_SANITIZING, + FILE_STATUS_STAGING, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/FilterType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/FilterType.java new file mode 100755 index 000000000..ec2977c1a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/FilterType.java @@ -0,0 +1,10 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum FilterType { + METASTORE, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/FulfillmentType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/FulfillmentType.java new file mode 100755 index 000000000..24be65df3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/FulfillmentType.java @@ -0,0 +1,11 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum FulfillmentType { + INSTALL, + REQUEST_ACCESS, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetExchangeRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetExchangeRequest.java new file mode 100755 index 000000000..4667e191a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetExchangeRequest.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +/** Get an exchange */ +@Generated +public class GetExchangeRequest { + /** */ + private String id; + + public GetExchangeRequest setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetExchangeRequest that = (GetExchangeRequest) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(GetExchangeRequest.class).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetExchangeResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetExchangeResponse.java new file mode 100755 index 000000000..bffa34862 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetExchangeResponse.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class GetExchangeResponse { + /** */ + @JsonProperty("exchange") + private Exchange exchange; + + public GetExchangeResponse setExchange(Exchange exchange) { + this.exchange = exchange; + return this; + } + + public Exchange getExchange() { + return exchange; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetExchangeResponse that = (GetExchangeResponse) o; + return Objects.equals(exchange, that.exchange); + } + + @Override + public int hashCode() { + return Objects.hash(exchange); + } + + @Override + public String toString() { + return new ToStringer(GetExchangeResponse.class).add("exchange", exchange).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetFileRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetFileRequest.java new file mode 100755 index 000000000..842764489 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetFileRequest.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +/** Get a file */ +@Generated +public class GetFileRequest { + /** */ + private String fileId; + + public GetFileRequest setFileId(String fileId) { + this.fileId = fileId; + return this; + } + + public String getFileId() { + return fileId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetFileRequest that = (GetFileRequest) o; + return Objects.equals(fileId, that.fileId); + } + + @Override + public int hashCode() { + return Objects.hash(fileId); + } + + @Override + public String toString() { + return new ToStringer(GetFileRequest.class).add("fileId", fileId).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetFileResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetFileResponse.java new file mode 100755 index 000000000..afcacadac --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetFileResponse.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class GetFileResponse { + /** */ + @JsonProperty("file_info") + private FileInfo fileInfo; + + public GetFileResponse setFileInfo(FileInfo fileInfo) { + this.fileInfo = fileInfo; + return this; + } + + public FileInfo getFileInfo() { + return fileInfo; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetFileResponse that = (GetFileResponse) o; + return Objects.equals(fileInfo, that.fileInfo); + } + + @Override + public int hashCode() { + return Objects.hash(fileInfo); + } + + @Override + public String toString() { + return new ToStringer(GetFileResponse.class).add("fileInfo", fileInfo).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetLatestVersionProviderAnalyticsDashboardResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetLatestVersionProviderAnalyticsDashboardResponse.java new file mode 100755 index 000000000..6ae01e89b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetLatestVersionProviderAnalyticsDashboardResponse.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class GetLatestVersionProviderAnalyticsDashboardResponse { + /** version here is latest logical version of the dashboard template */ + @JsonProperty("version") + private Long version; + + public GetLatestVersionProviderAnalyticsDashboardResponse setVersion(Long version) { + this.version = version; + return this; + } + + public Long getVersion() { + return version; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetLatestVersionProviderAnalyticsDashboardResponse that = + (GetLatestVersionProviderAnalyticsDashboardResponse) o; + return Objects.equals(version, that.version); + } + + @Override + public int hashCode() { + return Objects.hash(version); + } + + @Override + public String toString() { + return new ToStringer(GetLatestVersionProviderAnalyticsDashboardResponse.class) + .add("version", version) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingContentMetadataRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingContentMetadataRequest.java new file mode 100755 index 000000000..713873a8a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingContentMetadataRequest.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +/** Get listing content metadata */ +@Generated +public class GetListingContentMetadataRequest { + /** */ + private String listingId; + + /** */ + @QueryParam("page_size") + private Long pageSize; + + /** */ + @QueryParam("page_token") + private String pageToken; + + public GetListingContentMetadataRequest setListingId(String listingId) { + this.listingId = listingId; + return this; + } + + public String getListingId() { + return listingId; + } + + public GetListingContentMetadataRequest setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public GetListingContentMetadataRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetListingContentMetadataRequest that = (GetListingContentMetadataRequest) o; + return Objects.equals(listingId, that.listingId) + && Objects.equals(pageSize, that.pageSize) + && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(listingId, pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(GetListingContentMetadataRequest.class) + .add("listingId", listingId) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingContentMetadataResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingContentMetadataResponse.java new file mode 100755 index 
000000000..37479bc73 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingContentMetadataResponse.java @@ -0,0 +1,61 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class GetListingContentMetadataResponse { + /** */ + @JsonProperty("next_page_token") + private String nextPageToken; + + /** */ + @JsonProperty("shared_data_objects") + private Collection sharedDataObjects; + + public GetListingContentMetadataResponse setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public GetListingContentMetadataResponse setSharedDataObjects( + Collection sharedDataObjects) { + this.sharedDataObjects = sharedDataObjects; + return this; + } + + public Collection getSharedDataObjects() { + return sharedDataObjects; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetListingContentMetadataResponse that = (GetListingContentMetadataResponse) o; + return Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(sharedDataObjects, that.sharedDataObjects); + } + + @Override + public int hashCode() { + return Objects.hash(nextPageToken, sharedDataObjects); + } + + @Override + public String toString() { + return new ToStringer(GetListingContentMetadataResponse.class) + .add("nextPageToken", nextPageToken) + .add("sharedDataObjects", sharedDataObjects) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingRequest.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingRequest.java new file mode 100755 index 000000000..80191f286 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingRequest.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +/** Get listing */ +@Generated +public class GetListingRequest { + /** */ + private String id; + + public GetListingRequest setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetListingRequest that = (GetListingRequest) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(GetListingRequest.class).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingResponse.java new file mode 100755 index 000000000..720e1f5dd --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingResponse.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class GetListingResponse { + /** */ + @JsonProperty("listing") + private Listing listing; + + public GetListingResponse setListing(Listing listing) { + this.listing = listing; + return this; + } + + public Listing getListing() { + return listing; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetListingResponse that = (GetListingResponse) o; + return Objects.equals(listing, that.listing); + } + + @Override + public int hashCode() { + return Objects.hash(listing); + } + + @Override + public String toString() { + return new ToStringer(GetListingResponse.class).add("listing", listing).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingsRequest.java new file mode 100755 index 000000000..f871ee01f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingsRequest.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +/** List listings */ +@Generated +public class GetListingsRequest { + /** */ + @QueryParam("page_size") + private Long pageSize; + + /** */ + @QueryParam("page_token") + private String pageToken; + + public GetListingsRequest setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public GetListingsRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetListingsRequest that = (GetListingsRequest) o; + return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(GetListingsRequest.class) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingsResponse.java new file mode 100755 index 000000000..d466edf57 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetListingsResponse.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class GetListingsResponse { + /** */ + @JsonProperty("listings") + private Collection
<Listing>
listings; + + /** */ + @JsonProperty("next_page_token") + private String nextPageToken; + + public GetListingsResponse setListings(Collection listings) { + this.listings = listings; + return this; + } + + public Collection getListings() { + return listings; + } + + public GetListingsResponse setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetListingsResponse that = (GetListingsResponse) o; + return Objects.equals(listings, that.listings) + && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(listings, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(GetListingsResponse.class) + .add("listings", listings) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetPersonalizationRequestRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetPersonalizationRequestRequest.java new file mode 100755 index 000000000..ef5401d18 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetPersonalizationRequestRequest.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +/** Get the personalization request for a listing */ +@Generated +public class GetPersonalizationRequestRequest { + /** */ + private String listingId; + + public GetPersonalizationRequestRequest setListingId(String listingId) { + this.listingId = listingId; + return this; + } + + public String getListingId() { + return listingId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetPersonalizationRequestRequest that = (GetPersonalizationRequestRequest) o; + return Objects.equals(listingId, that.listingId); + } + + @Override + public int hashCode() { + return Objects.hash(listingId); + } + + @Override + public String toString() { + return new ToStringer(GetPersonalizationRequestRequest.class) + .add("listingId", listingId) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetPersonalizationRequestResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetPersonalizationRequestResponse.java new file mode 100755 index 000000000..97c64cb26 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetPersonalizationRequestResponse.java @@ -0,0 +1,46 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class GetPersonalizationRequestResponse { + /** */ + @JsonProperty("personalization_requests") + private Collection personalizationRequests; + + public GetPersonalizationRequestResponse setPersonalizationRequests( + Collection personalizationRequests) { + this.personalizationRequests = personalizationRequests; + return this; + } + + public Collection getPersonalizationRequests() { + return personalizationRequests; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetPersonalizationRequestResponse that = (GetPersonalizationRequestResponse) o; + return Objects.equals(personalizationRequests, that.personalizationRequests); + } + + @Override + public int hashCode() { + return Objects.hash(personalizationRequests); + } + + @Override + public String toString() { + return new ToStringer(GetPersonalizationRequestResponse.class) + .add("personalizationRequests", personalizationRequests) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetProviderRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetProviderRequest.java new file mode 100755 index 000000000..4f800c7d3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetProviderRequest.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +/** Get a provider */ +@Generated +public class GetProviderRequest { + /** */ + private String id; + + public GetProviderRequest setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetProviderRequest that = (GetProviderRequest) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(GetProviderRequest.class).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetProviderResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetProviderResponse.java new file mode 100755 index 000000000..0330f3f34 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/GetProviderResponse.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class GetProviderResponse { + /** */ + @JsonProperty("provider") + private ProviderInfo provider; + + public GetProviderResponse setProvider(ProviderInfo provider) { + this.provider = provider; + return this; + } + + public ProviderInfo getProvider() { + return provider; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetProviderResponse that = (GetProviderResponse) o; + return Objects.equals(provider, that.provider); + } + + @Override + public int hashCode() { + return Objects.hash(provider); + } + + @Override + public String toString() { + return new ToStringer(GetProviderResponse.class).add("provider", provider).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/Installation.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/Installation.java new file mode 100755 index 000000000..3375b657d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/Installation.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class Installation { + /** */ + @JsonProperty("installation") + private InstallationDetail installation; + + public Installation setInstallation(InstallationDetail installation) { + this.installation = installation; + return this; + } + + public InstallationDetail getInstallation() { + return installation; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Installation that = (Installation) o; + return Objects.equals(installation, that.installation); + } + + @Override + public int hashCode() { + return Objects.hash(installation); + } + + @Override + public String toString() { + return new ToStringer(Installation.class).add("installation", installation).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/InstallationDetail.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/InstallationDetail.java new file mode 100755 index 000000000..c7a3574b9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/InstallationDetail.java @@ -0,0 +1,238 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class InstallationDetail { + /** */ + @JsonProperty("catalog_name") + private String catalogName; + + /** */ + @JsonProperty("error_message") + private String errorMessage; + + /** */ + @JsonProperty("id") + private String id; + + /** */ + @JsonProperty("installed_on") + private Long installedOn; + + /** */ + @JsonProperty("listing_id") + private String listingId; + + /** */ + @JsonProperty("listing_name") + private String listingName; + + /** */ + @JsonProperty("recipient_type") + private DeltaSharingRecipientType recipientType; + + /** */ + @JsonProperty("repo_name") + private String repoName; + + /** */ + @JsonProperty("repo_path") + private String repoPath; + + /** */ + @JsonProperty("share_name") + private String shareName; + + /** */ + @JsonProperty("status") + private InstallationStatus status; + + /** */ + @JsonProperty("token_detail") + private TokenDetail tokenDetail; + + /** */ + @JsonProperty("tokens") + private Collection tokens; + + public InstallationDetail setCatalogName(String catalogName) { + this.catalogName = catalogName; + return this; + } + + public String getCatalogName() { + return catalogName; + } + + public InstallationDetail setErrorMessage(String errorMessage) { + this.errorMessage = errorMessage; + return this; + } + + public String getErrorMessage() { + return errorMessage; + } + + public InstallationDetail setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public InstallationDetail setInstalledOn(Long installedOn) { + this.installedOn = installedOn; + return this; + } + + public Long getInstalledOn() { + return installedOn; + } + + public InstallationDetail setListingId(String listingId) { + this.listingId = 
listingId; + return this; + } + + public String getListingId() { + return listingId; + } + + public InstallationDetail setListingName(String listingName) { + this.listingName = listingName; + return this; + } + + public String getListingName() { + return listingName; + } + + public InstallationDetail setRecipientType(DeltaSharingRecipientType recipientType) { + this.recipientType = recipientType; + return this; + } + + public DeltaSharingRecipientType getRecipientType() { + return recipientType; + } + + public InstallationDetail setRepoName(String repoName) { + this.repoName = repoName; + return this; + } + + public String getRepoName() { + return repoName; + } + + public InstallationDetail setRepoPath(String repoPath) { + this.repoPath = repoPath; + return this; + } + + public String getRepoPath() { + return repoPath; + } + + public InstallationDetail setShareName(String shareName) { + this.shareName = shareName; + return this; + } + + public String getShareName() { + return shareName; + } + + public InstallationDetail setStatus(InstallationStatus status) { + this.status = status; + return this; + } + + public InstallationStatus getStatus() { + return status; + } + + public InstallationDetail setTokenDetail(TokenDetail tokenDetail) { + this.tokenDetail = tokenDetail; + return this; + } + + public TokenDetail getTokenDetail() { + return tokenDetail; + } + + public InstallationDetail setTokens(Collection tokens) { + this.tokens = tokens; + return this; + } + + public Collection getTokens() { + return tokens; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + InstallationDetail that = (InstallationDetail) o; + return Objects.equals(catalogName, that.catalogName) + && Objects.equals(errorMessage, that.errorMessage) + && Objects.equals(id, that.id) + && Objects.equals(installedOn, that.installedOn) + && Objects.equals(listingId, that.listingId) + && 
Objects.equals(listingName, that.listingName) + && Objects.equals(recipientType, that.recipientType) + && Objects.equals(repoName, that.repoName) + && Objects.equals(repoPath, that.repoPath) + && Objects.equals(shareName, that.shareName) + && Objects.equals(status, that.status) + && Objects.equals(tokenDetail, that.tokenDetail) + && Objects.equals(tokens, that.tokens); + } + + @Override + public int hashCode() { + return Objects.hash( + catalogName, + errorMessage, + id, + installedOn, + listingId, + listingName, + recipientType, + repoName, + repoPath, + shareName, + status, + tokenDetail, + tokens); + } + + @Override + public String toString() { + return new ToStringer(InstallationDetail.class) + .add("catalogName", catalogName) + .add("errorMessage", errorMessage) + .add("id", id) + .add("installedOn", installedOn) + .add("listingId", listingId) + .add("listingName", listingName) + .add("recipientType", recipientType) + .add("repoName", repoName) + .add("repoPath", repoPath) + .add("shareName", shareName) + .add("status", status) + .add("tokenDetail", tokenDetail) + .add("tokens", tokens) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/InstallationStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/InstallationStatus.java new file mode 100755 index 000000000..a23553964 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/InstallationStatus.java @@ -0,0 +1,11 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum InstallationStatus { + FAILED, + INSTALLED, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListAllInstallationsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListAllInstallationsRequest.java new file mode 100755 index 000000000..fa9f837c1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListAllInstallationsRequest.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +/** List all installations */ +@Generated +public class ListAllInstallationsRequest { + /** */ + @QueryParam("page_size") + private Long pageSize; + + /** */ + @QueryParam("page_token") + private String pageToken; + + public ListAllInstallationsRequest setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListAllInstallationsRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListAllInstallationsRequest that = (ListAllInstallationsRequest) o; + return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListAllInstallationsRequest.class) + .add("pageSize", pageSize) + 
.add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListAllInstallationsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListAllInstallationsResponse.java new file mode 100755 index 000000000..9e19e2be8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListAllInstallationsResponse.java @@ -0,0 +1,61 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class ListAllInstallationsResponse { + /** */ + @JsonProperty("installations") + private Collection installations; + + /** */ + @JsonProperty("next_page_token") + private String nextPageToken; + + public ListAllInstallationsResponse setInstallations( + Collection installations) { + this.installations = installations; + return this; + } + + public Collection getInstallations() { + return installations; + } + + public ListAllInstallationsResponse setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListAllInstallationsResponse that = (ListAllInstallationsResponse) o; + return Objects.equals(installations, that.installations) + && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(installations, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(ListAllInstallationsResponse.class) + 
.add("installations", installations) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListAllPersonalizationRequestsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListAllPersonalizationRequestsRequest.java new file mode 100755 index 000000000..a9daf80cb --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListAllPersonalizationRequestsRequest.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +/** List all personalization requests */ +@Generated +public class ListAllPersonalizationRequestsRequest { + /** */ + @QueryParam("page_size") + private Long pageSize; + + /** */ + @QueryParam("page_token") + private String pageToken; + + public ListAllPersonalizationRequestsRequest setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListAllPersonalizationRequestsRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListAllPersonalizationRequestsRequest that = (ListAllPersonalizationRequestsRequest) o; + return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListAllPersonalizationRequestsRequest.class) + 
.add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListAllPersonalizationRequestsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListAllPersonalizationRequestsResponse.java new file mode 100755 index 000000000..6ab6333f1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListAllPersonalizationRequestsResponse.java @@ -0,0 +1,61 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class ListAllPersonalizationRequestsResponse { + /** */ + @JsonProperty("next_page_token") + private String nextPageToken; + + /** */ + @JsonProperty("personalization_requests") + private Collection personalizationRequests; + + public ListAllPersonalizationRequestsResponse setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListAllPersonalizationRequestsResponse setPersonalizationRequests( + Collection personalizationRequests) { + this.personalizationRequests = personalizationRequests; + return this; + } + + public Collection getPersonalizationRequests() { + return personalizationRequests; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListAllPersonalizationRequestsResponse that = (ListAllPersonalizationRequestsResponse) o; + return Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(personalizationRequests, that.personalizationRequests); + } + + 
@Override + public int hashCode() { + return Objects.hash(nextPageToken, personalizationRequests); + } + + @Override + public String toString() { + return new ToStringer(ListAllPersonalizationRequestsResponse.class) + .add("nextPageToken", nextPageToken) + .add("personalizationRequests", personalizationRequests) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangeFiltersRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangeFiltersRequest.java new file mode 100755 index 000000000..c5f15252f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangeFiltersRequest.java @@ -0,0 +1,75 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +/** List exchange filters */ +@Generated +public class ListExchangeFiltersRequest { + /** */ + @QueryParam("exchange_id") + private String exchangeId; + + /** */ + @QueryParam("page_size") + private Long pageSize; + + /** */ + @QueryParam("page_token") + private String pageToken; + + public ListExchangeFiltersRequest setExchangeId(String exchangeId) { + this.exchangeId = exchangeId; + return this; + } + + public String getExchangeId() { + return exchangeId; + } + + public ListExchangeFiltersRequest setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListExchangeFiltersRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) 
return false; + ListExchangeFiltersRequest that = (ListExchangeFiltersRequest) o; + return Objects.equals(exchangeId, that.exchangeId) + && Objects.equals(pageSize, that.pageSize) + && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(exchangeId, pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListExchangeFiltersRequest.class) + .add("exchangeId", exchangeId) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangeFiltersResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangeFiltersResponse.java new file mode 100755 index 000000000..f39d977e3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangeFiltersResponse.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class ListExchangeFiltersResponse { + /** */ + @JsonProperty("filters") + private Collection filters; + + /** */ + @JsonProperty("next_page_token") + private String nextPageToken; + + public ListExchangeFiltersResponse setFilters(Collection filters) { + this.filters = filters; + return this; + } + + public Collection getFilters() { + return filters; + } + + public ListExchangeFiltersResponse setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListExchangeFiltersResponse that = (ListExchangeFiltersResponse) o; + return Objects.equals(filters, that.filters) + && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(filters, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(ListExchangeFiltersResponse.class) + .add("filters", filters) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangesForListingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangesForListingRequest.java new file mode 100755 index 000000000..c03568776 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangesForListingRequest.java @@ -0,0 +1,75 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +/** List exchanges for listing */ +@Generated +public class ListExchangesForListingRequest { + /** */ + @QueryParam("listing_id") + private String listingId; + + /** */ + @QueryParam("page_size") + private Long pageSize; + + /** */ + @QueryParam("page_token") + private String pageToken; + + public ListExchangesForListingRequest setListingId(String listingId) { + this.listingId = listingId; + return this; + } + + public String getListingId() { + return listingId; + } + + public ListExchangesForListingRequest setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListExchangesForListingRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListExchangesForListingRequest that = (ListExchangesForListingRequest) o; + return Objects.equals(listingId, that.listingId) + && Objects.equals(pageSize, that.pageSize) + && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(listingId, pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListExchangesForListingRequest.class) + .add("listingId", listingId) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangesForListingResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangesForListingResponse.java new file mode 100755 index 
000000000..7b1fb1990 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangesForListingResponse.java @@ -0,0 +1,61 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class ListExchangesForListingResponse { + /** */ + @JsonProperty("exchange_listing") + private Collection exchangeListing; + + /** */ + @JsonProperty("next_page_token") + private String nextPageToken; + + public ListExchangesForListingResponse setExchangeListing( + Collection exchangeListing) { + this.exchangeListing = exchangeListing; + return this; + } + + public Collection getExchangeListing() { + return exchangeListing; + } + + public ListExchangesForListingResponse setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListExchangesForListingResponse that = (ListExchangesForListingResponse) o; + return Objects.equals(exchangeListing, that.exchangeListing) + && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(exchangeListing, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(ListExchangesForListingResponse.class) + .add("exchangeListing", exchangeListing) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangesRequest.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangesRequest.java new file mode 100755 index 000000000..3d92e883d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangesRequest.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +/** List exchanges */ +@Generated +public class ListExchangesRequest { + /** */ + @QueryParam("page_size") + private Long pageSize; + + /** */ + @QueryParam("page_token") + private String pageToken; + + public ListExchangesRequest setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListExchangesRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListExchangesRequest that = (ListExchangesRequest) o; + return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListExchangesRequest.class) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangesResponse.java new file mode 100755 index 000000000..a3340d21e --- /dev/null +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListExchangesResponse.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class ListExchangesResponse { + /** */ + @JsonProperty("exchanges") + private Collection exchanges; + + /** */ + @JsonProperty("next_page_token") + private String nextPageToken; + + public ListExchangesResponse setExchanges(Collection exchanges) { + this.exchanges = exchanges; + return this; + } + + public Collection getExchanges() { + return exchanges; + } + + public ListExchangesResponse setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListExchangesResponse that = (ListExchangesResponse) o; + return Objects.equals(exchanges, that.exchanges) + && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(exchanges, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(ListExchangesResponse.class) + .add("exchanges", exchanges) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListFilesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListFilesRequest.java new file mode 100755 index 000000000..b35fb4db6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListFilesRequest.java @@ 
-0,0 +1,75 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +/** List files */ +@Generated +public class ListFilesRequest { + /** */ + @QueryParam("file_parent") + private FileParent fileParent; + + /** */ + @QueryParam("page_size") + private Long pageSize; + + /** */ + @QueryParam("page_token") + private String pageToken; + + public ListFilesRequest setFileParent(FileParent fileParent) { + this.fileParent = fileParent; + return this; + } + + public FileParent getFileParent() { + return fileParent; + } + + public ListFilesRequest setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListFilesRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListFilesRequest that = (ListFilesRequest) o; + return Objects.equals(fileParent, that.fileParent) + && Objects.equals(pageSize, that.pageSize) + && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(fileParent, pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListFilesRequest.class) + .add("fileParent", fileParent) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListFilesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListFilesResponse.java new file mode 100755 index 000000000..cb90f3e58 --- 
/dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListFilesResponse.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class ListFilesResponse { + /** */ + @JsonProperty("file_infos") + private Collection fileInfos; + + /** */ + @JsonProperty("next_page_token") + private String nextPageToken; + + public ListFilesResponse setFileInfos(Collection fileInfos) { + this.fileInfos = fileInfos; + return this; + } + + public Collection getFileInfos() { + return fileInfos; + } + + public ListFilesResponse setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListFilesResponse that = (ListFilesResponse) o; + return Objects.equals(fileInfos, that.fileInfos) + && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(fileInfos, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(ListFilesResponse.class) + .add("fileInfos", fileInfos) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListFulfillmentsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListFulfillmentsRequest.java new file mode 100755 index 000000000..9b92d900d --- /dev/null +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListFulfillmentsRequest.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +/** List all listing fulfillments */ +@Generated +public class ListFulfillmentsRequest { + /** */ + private String listingId; + + /** */ + @QueryParam("page_size") + private Long pageSize; + + /** */ + @QueryParam("page_token") + private String pageToken; + + public ListFulfillmentsRequest setListingId(String listingId) { + this.listingId = listingId; + return this; + } + + public String getListingId() { + return listingId; + } + + public ListFulfillmentsRequest setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListFulfillmentsRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListFulfillmentsRequest that = (ListFulfillmentsRequest) o; + return Objects.equals(listingId, that.listingId) + && Objects.equals(pageSize, that.pageSize) + && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(listingId, pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListFulfillmentsRequest.class) + .add("listingId", listingId) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListFulfillmentsResponse.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListFulfillmentsResponse.java new file mode 100755 index 000000000..ca31df7d8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListFulfillmentsResponse.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class ListFulfillmentsResponse { + /** */ + @JsonProperty("fulfillments") + private Collection fulfillments; + + /** */ + @JsonProperty("next_page_token") + private String nextPageToken; + + public ListFulfillmentsResponse setFulfillments(Collection fulfillments) { + this.fulfillments = fulfillments; + return this; + } + + public Collection getFulfillments() { + return fulfillments; + } + + public ListFulfillmentsResponse setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListFulfillmentsResponse that = (ListFulfillmentsResponse) o; + return Objects.equals(fulfillments, that.fulfillments) + && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(fulfillments, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(ListFulfillmentsResponse.class) + .add("fulfillments", fulfillments) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListInstallationsRequest.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListInstallationsRequest.java new file mode 100755 index 000000000..9b50c64a7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListInstallationsRequest.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +/** List installations for a listing */ +@Generated +public class ListInstallationsRequest { + /** */ + private String listingId; + + /** */ + @QueryParam("page_size") + private Long pageSize; + + /** */ + @QueryParam("page_token") + private String pageToken; + + public ListInstallationsRequest setListingId(String listingId) { + this.listingId = listingId; + return this; + } + + public String getListingId() { + return listingId; + } + + public ListInstallationsRequest setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListInstallationsRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListInstallationsRequest that = (ListInstallationsRequest) o; + return Objects.equals(listingId, that.listingId) + && Objects.equals(pageSize, that.pageSize) + && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(listingId, pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListInstallationsRequest.class) + .add("listingId", listingId) + .add("pageSize", pageSize) + 
.add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListInstallationsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListInstallationsResponse.java new file mode 100755 index 000000000..3a4b12401 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListInstallationsResponse.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class ListInstallationsResponse { + /** */ + @JsonProperty("installations") + private Collection installations; + + /** */ + @JsonProperty("next_page_token") + private String nextPageToken; + + public ListInstallationsResponse setInstallations(Collection installations) { + this.installations = installations; + return this; + } + + public Collection getInstallations() { + return installations; + } + + public ListInstallationsResponse setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListInstallationsResponse that = (ListInstallationsResponse) o; + return Objects.equals(installations, that.installations) + && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(installations, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(ListInstallationsResponse.class) + .add("installations", installations) + 
.add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListListingsForExchangeRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListListingsForExchangeRequest.java new file mode 100755 index 000000000..5d8522345 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListListingsForExchangeRequest.java @@ -0,0 +1,75 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +/** List listings for exchange */ +@Generated +public class ListListingsForExchangeRequest { + /** */ + @QueryParam("exchange_id") + private String exchangeId; + + /** */ + @QueryParam("page_size") + private Long pageSize; + + /** */ + @QueryParam("page_token") + private String pageToken; + + public ListListingsForExchangeRequest setExchangeId(String exchangeId) { + this.exchangeId = exchangeId; + return this; + } + + public String getExchangeId() { + return exchangeId; + } + + public ListListingsForExchangeRequest setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListListingsForExchangeRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListListingsForExchangeRequest that = (ListListingsForExchangeRequest) o; + return Objects.equals(exchangeId, that.exchangeId) + && Objects.equals(pageSize, that.pageSize) + && Objects.equals(pageToken, that.pageToken); + } + + 
@Override + public int hashCode() { + return Objects.hash(exchangeId, pageSize, pageToken); + } + + @Override + public String toString() { + return new ToStringer(ListListingsForExchangeRequest.class) + .add("exchangeId", exchangeId) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListListingsForExchangeResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListListingsForExchangeResponse.java new file mode 100755 index 000000000..b88884f8e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListListingsForExchangeResponse.java @@ -0,0 +1,61 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class ListListingsForExchangeResponse { + /** */ + @JsonProperty("exchange_listings") + private Collection exchangeListings; + + /** */ + @JsonProperty("next_page_token") + private String nextPageToken; + + public ListListingsForExchangeResponse setExchangeListings( + Collection exchangeListings) { + this.exchangeListings = exchangeListings; + return this; + } + + public Collection getExchangeListings() { + return exchangeListings; + } + + public ListListingsForExchangeResponse setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListListingsForExchangeResponse that = (ListListingsForExchangeResponse) o; + return 
Objects.equals(exchangeListings, that.exchangeListings) + && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(exchangeListings, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(ListListingsForExchangeResponse.class) + .add("exchangeListings", exchangeListings) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListListingsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListListingsRequest.java new file mode 100755 index 000000000..9da775dba --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListListingsRequest.java @@ -0,0 +1,191 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import java.util.Collection; +import java.util.Objects; + +/** List listings */ +@Generated +public class ListListingsRequest { + /** Matches any of the following asset types */ + @QueryParam("assets") + private Collection assets; + + /** Matches any of the following categories */ + @QueryParam("categories") + private Collection categories; + + /** Filters each listing based on if it is free. */ + @QueryParam("is_free") + private Boolean isFree; + + /** Filters each listing based on if it is a private exchange. */ + @QueryParam("is_private_exchange") + private Boolean isPrivateExchange; + + /** Filters each listing based on whether it is a staff pick. 
*/ + @QueryParam("is_staff_pick") + private Boolean isStaffPick; + + /** */ + @QueryParam("page_size") + private Long pageSize; + + /** */ + @QueryParam("page_token") + private String pageToken; + + /** Matches any of the following provider ids */ + @QueryParam("provider_ids") + private Collection providerIds; + + /** Criteria for sorting the resulting set of listings. */ + @QueryParam("sort_by_spec") + private SortBySpec sortBySpec; + + /** Matches any of the following tags */ + @QueryParam("tags") + private Collection tags; + + public ListListingsRequest setAssets(Collection assets) { + this.assets = assets; + return this; + } + + public Collection getAssets() { + return assets; + } + + public ListListingsRequest setCategories(Collection categories) { + this.categories = categories; + return this; + } + + public Collection getCategories() { + return categories; + } + + public ListListingsRequest setIsFree(Boolean isFree) { + this.isFree = isFree; + return this; + } + + public Boolean getIsFree() { + return isFree; + } + + public ListListingsRequest setIsPrivateExchange(Boolean isPrivateExchange) { + this.isPrivateExchange = isPrivateExchange; + return this; + } + + public Boolean getIsPrivateExchange() { + return isPrivateExchange; + } + + public ListListingsRequest setIsStaffPick(Boolean isStaffPick) { + this.isStaffPick = isStaffPick; + return this; + } + + public Boolean getIsStaffPick() { + return isStaffPick; + } + + public ListListingsRequest setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListListingsRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + public ListListingsRequest setProviderIds(Collection providerIds) { + this.providerIds = providerIds; + return this; + } + + public Collection getProviderIds() { + return providerIds; + } + + public 
ListListingsRequest setSortBySpec(SortBySpec sortBySpec) { + this.sortBySpec = sortBySpec; + return this; + } + + public SortBySpec getSortBySpec() { + return sortBySpec; + } + + public ListListingsRequest setTags(Collection tags) { + this.tags = tags; + return this; + } + + public Collection getTags() { + return tags; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListListingsRequest that = (ListListingsRequest) o; + return Objects.equals(assets, that.assets) + && Objects.equals(categories, that.categories) + && Objects.equals(isFree, that.isFree) + && Objects.equals(isPrivateExchange, that.isPrivateExchange) + && Objects.equals(isStaffPick, that.isStaffPick) + && Objects.equals(pageSize, that.pageSize) + && Objects.equals(pageToken, that.pageToken) + && Objects.equals(providerIds, that.providerIds) + && Objects.equals(sortBySpec, that.sortBySpec) + && Objects.equals(tags, that.tags); + } + + @Override + public int hashCode() { + return Objects.hash( + assets, + categories, + isFree, + isPrivateExchange, + isStaffPick, + pageSize, + pageToken, + providerIds, + sortBySpec, + tags); + } + + @Override + public String toString() { + return new ToStringer(ListListingsRequest.class) + .add("assets", assets) + .add("categories", categories) + .add("isFree", isFree) + .add("isPrivateExchange", isPrivateExchange) + .add("isStaffPick", isStaffPick) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .add("providerIds", providerIds) + .add("sortBySpec", sortBySpec) + .add("tags", tags) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListListingsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListListingsResponse.java new file mode 100755 index 000000000..1ec5cf42c --- /dev/null +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListListingsResponse.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class ListListingsResponse { + /** */ + @JsonProperty("listings") + private Collection listings; + + /** */ + @JsonProperty("next_page_token") + private String nextPageToken; + + public ListListingsResponse setListings(Collection listings) { + this.listings = listings; + return this; + } + + public Collection getListings() { + return listings; + } + + public ListListingsResponse setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListListingsResponse that = (ListListingsResponse) o; + return Objects.equals(listings, that.listings) + && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(listings, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(ListListingsResponse.class) + .add("listings", listings) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListProviderAnalyticsDashboardResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListProviderAnalyticsDashboardResponse.java new file mode 100755 index 000000000..43dc6cef5 --- /dev/null +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListProviderAnalyticsDashboardResponse.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class ListProviderAnalyticsDashboardResponse { + /** dashboard_id will be used to open Lakeview dashboard. */ + @JsonProperty("dashboard_id") + private String dashboardId; + + /** */ + @JsonProperty("id") + private String id; + + /** */ + @JsonProperty("version") + private Long version; + + public ListProviderAnalyticsDashboardResponse setDashboardId(String dashboardId) { + this.dashboardId = dashboardId; + return this; + } + + public String getDashboardId() { + return dashboardId; + } + + public ListProviderAnalyticsDashboardResponse setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public ListProviderAnalyticsDashboardResponse setVersion(Long version) { + this.version = version; + return this; + } + + public Long getVersion() { + return version; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListProviderAnalyticsDashboardResponse that = (ListProviderAnalyticsDashboardResponse) o; + return Objects.equals(dashboardId, that.dashboardId) + && Objects.equals(id, that.id) + && Objects.equals(version, that.version); + } + + @Override + public int hashCode() { + return Objects.hash(dashboardId, id, version); + } + + @Override + public String toString() { + return new ToStringer(ListProviderAnalyticsDashboardResponse.class) + .add("dashboardId", dashboardId) + .add("id", id) + .add("version", version) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListProvidersRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListProvidersRequest.java new file mode 100755 index 000000000..a0b78f6cd --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListProvidersRequest.java @@ -0,0 +1,75 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +/** List providers */ +@Generated +public class ListProvidersRequest { + /** */ + @QueryParam("is_featured") + private Boolean isFeatured; + + /** */ + @QueryParam("page_size") + private Long pageSize; + + /** */ + @QueryParam("page_token") + private String pageToken; + + public ListProvidersRequest setIsFeatured(Boolean isFeatured) { + this.isFeatured = isFeatured; + return this; + } + + public Boolean getIsFeatured() { + return isFeatured; + } + + public ListProvidersRequest setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public ListProvidersRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListProvidersRequest that = (ListProvidersRequest) o; + return Objects.equals(isFeatured, that.isFeatured) + && Objects.equals(pageSize, that.pageSize) + && Objects.equals(pageToken, that.pageToken); + } + + @Override + public int hashCode() { + return Objects.hash(isFeatured, pageSize, pageToken); + } + + @Override + public String toString() { + return new 
ToStringer(ListProvidersRequest.class) + .add("isFeatured", isFeatured) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListProvidersResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListProvidersResponse.java new file mode 100755 index 000000000..e91d16bd7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListProvidersResponse.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class ListProvidersResponse { + /** */ + @JsonProperty("next_page_token") + private String nextPageToken; + + /** */ + @JsonProperty("providers") + private Collection providers; + + public ListProvidersResponse setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + public ListProvidersResponse setProviders(Collection providers) { + this.providers = providers; + return this; + } + + public Collection getProviders() { + return providers; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListProvidersResponse that = (ListProvidersResponse) o; + return Objects.equals(nextPageToken, that.nextPageToken) + && Objects.equals(providers, that.providers); + } + + @Override + public int hashCode() { + return Objects.hash(nextPageToken, providers); + } + + @Override + public String toString() { + return new ToStringer(ListProvidersResponse.class) + .add("nextPageToken", 
nextPageToken) + .add("providers", providers) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/Listing.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/Listing.java new file mode 100755 index 000000000..f73465e34 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/Listing.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class Listing { + /** */ + @JsonProperty("detail") + private ListingDetail detail; + + /** */ + @JsonProperty("id") + private String id; + + /** Next Number: 26 */ + @JsonProperty("summary") + private ListingSummary summary; + + public Listing setDetail(ListingDetail detail) { + this.detail = detail; + return this; + } + + public ListingDetail getDetail() { + return detail; + } + + public Listing setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public Listing setSummary(ListingSummary summary) { + this.summary = summary; + return this; + } + + public ListingSummary getSummary() { + return summary; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Listing that = (Listing) o; + return Objects.equals(detail, that.detail) + && Objects.equals(id, that.id) + && Objects.equals(summary, that.summary); + } + + @Override + public int hashCode() { + return Objects.hash(detail, id, summary); + } + + @Override + public String toString() { + return new ToStringer(Listing.class) + .add("detail", detail) + .add("id", id) + .add("summary", summary) + .toString(); + } +} diff --git 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingDetail.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingDetail.java new file mode 100755 index 000000000..dcf891232 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingDetail.java @@ -0,0 +1,348 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class ListingDetail { + /** Type of assets included in the listing. eg. GIT_REPO, DATA_TABLE, MODEL, NOTEBOOK */ + @JsonProperty("assets") + private Collection assets; + + /** The ending date timestamp for when the data spans */ + @JsonProperty("collection_date_end") + private Long collectionDateEnd; + + /** The starting date timestamp for when the data spans */ + @JsonProperty("collection_date_start") + private Long collectionDateStart; + + /** Smallest unit of time in the dataset */ + @JsonProperty("collection_granularity") + private DataRefreshInfo collectionGranularity; + + /** Whether the dataset is free or paid */ + @JsonProperty("cost") + private Cost cost; + + /** Where/how the data is sourced */ + @JsonProperty("data_source") + private String dataSource; + + /** */ + @JsonProperty("description") + private String description; + + /** */ + @JsonProperty("documentation_link") + private String documentationLink; + + /** */ + @JsonProperty("embedded_notebook_file_infos") + private Collection embeddedNotebookFileInfos; + + /** */ + @JsonProperty("file_ids") + private Collection fileIds; + + /** Which geo region the listing data is collected from */ + @JsonProperty("geographical_coverage") + private String geographicalCoverage; + + /** + * ID 20, 21 
removed don't use License of the data asset - Required for listings with model based + * assets + */ + @JsonProperty("license") + private String license; + + /** + * What the pricing model is (e.g. paid, subscription, paid upfront); should only be present if + * cost is paid TODO: Not used yet, should deprecate if we will never use it + */ + @JsonProperty("pricing_model") + private String pricingModel; + + /** */ + @JsonProperty("privacy_policy_link") + private String privacyPolicyLink; + + /** size of the dataset in GB */ + @JsonProperty("size") + private Double size; + + /** */ + @JsonProperty("support_link") + private String supportLink; + + /** + * Listing tags - Simple key value pair to annotate listings. When should I use tags vs dedicated + * fields? Using tags avoids the need to add new columns in the database for new annotations. + * However, this should be used sparingly since tags are stored as key value pair. Use tags only: + * 1. If the field is optional and won't need to have NOT NULL integrity check 2. The value is + * fairly fixed, static and low cardinality (eg. enums). 3. The value won't be used in filters or + * joins with other tables. 
+ */ + @JsonProperty("tags") + private Collection tags; + + /** */ + @JsonProperty("terms_of_service") + private String termsOfService; + + /** How often data is updated */ + @JsonProperty("update_frequency") + private DataRefreshInfo updateFrequency; + + public ListingDetail setAssets(Collection assets) { + this.assets = assets; + return this; + } + + public Collection getAssets() { + return assets; + } + + public ListingDetail setCollectionDateEnd(Long collectionDateEnd) { + this.collectionDateEnd = collectionDateEnd; + return this; + } + + public Long getCollectionDateEnd() { + return collectionDateEnd; + } + + public ListingDetail setCollectionDateStart(Long collectionDateStart) { + this.collectionDateStart = collectionDateStart; + return this; + } + + public Long getCollectionDateStart() { + return collectionDateStart; + } + + public ListingDetail setCollectionGranularity(DataRefreshInfo collectionGranularity) { + this.collectionGranularity = collectionGranularity; + return this; + } + + public DataRefreshInfo getCollectionGranularity() { + return collectionGranularity; + } + + public ListingDetail setCost(Cost cost) { + this.cost = cost; + return this; + } + + public Cost getCost() { + return cost; + } + + public ListingDetail setDataSource(String dataSource) { + this.dataSource = dataSource; + return this; + } + + public String getDataSource() { + return dataSource; + } + + public ListingDetail setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public ListingDetail setDocumentationLink(String documentationLink) { + this.documentationLink = documentationLink; + return this; + } + + public String getDocumentationLink() { + return documentationLink; + } + + public ListingDetail setEmbeddedNotebookFileInfos( + Collection embeddedNotebookFileInfos) { + this.embeddedNotebookFileInfos = embeddedNotebookFileInfos; + return this; + } + + public Collection 
getEmbeddedNotebookFileInfos() { + return embeddedNotebookFileInfos; + } + + public ListingDetail setFileIds(Collection fileIds) { + this.fileIds = fileIds; + return this; + } + + public Collection getFileIds() { + return fileIds; + } + + public ListingDetail setGeographicalCoverage(String geographicalCoverage) { + this.geographicalCoverage = geographicalCoverage; + return this; + } + + public String getGeographicalCoverage() { + return geographicalCoverage; + } + + public ListingDetail setLicense(String license) { + this.license = license; + return this; + } + + public String getLicense() { + return license; + } + + public ListingDetail setPricingModel(String pricingModel) { + this.pricingModel = pricingModel; + return this; + } + + public String getPricingModel() { + return pricingModel; + } + + public ListingDetail setPrivacyPolicyLink(String privacyPolicyLink) { + this.privacyPolicyLink = privacyPolicyLink; + return this; + } + + public String getPrivacyPolicyLink() { + return privacyPolicyLink; + } + + public ListingDetail setSize(Double size) { + this.size = size; + return this; + } + + public Double getSize() { + return size; + } + + public ListingDetail setSupportLink(String supportLink) { + this.supportLink = supportLink; + return this; + } + + public String getSupportLink() { + return supportLink; + } + + public ListingDetail setTags(Collection tags) { + this.tags = tags; + return this; + } + + public Collection getTags() { + return tags; + } + + public ListingDetail setTermsOfService(String termsOfService) { + this.termsOfService = termsOfService; + return this; + } + + public String getTermsOfService() { + return termsOfService; + } + + public ListingDetail setUpdateFrequency(DataRefreshInfo updateFrequency) { + this.updateFrequency = updateFrequency; + return this; + } + + public DataRefreshInfo getUpdateFrequency() { + return updateFrequency; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || 
getClass() != o.getClass()) return false; + ListingDetail that = (ListingDetail) o; + return Objects.equals(assets, that.assets) + && Objects.equals(collectionDateEnd, that.collectionDateEnd) + && Objects.equals(collectionDateStart, that.collectionDateStart) + && Objects.equals(collectionGranularity, that.collectionGranularity) + && Objects.equals(cost, that.cost) + && Objects.equals(dataSource, that.dataSource) + && Objects.equals(description, that.description) + && Objects.equals(documentationLink, that.documentationLink) + && Objects.equals(embeddedNotebookFileInfos, that.embeddedNotebookFileInfos) + && Objects.equals(fileIds, that.fileIds) + && Objects.equals(geographicalCoverage, that.geographicalCoverage) + && Objects.equals(license, that.license) + && Objects.equals(pricingModel, that.pricingModel) + && Objects.equals(privacyPolicyLink, that.privacyPolicyLink) + && Objects.equals(size, that.size) + && Objects.equals(supportLink, that.supportLink) + && Objects.equals(tags, that.tags) + && Objects.equals(termsOfService, that.termsOfService) + && Objects.equals(updateFrequency, that.updateFrequency); + } + + @Override + public int hashCode() { + return Objects.hash( + assets, + collectionDateEnd, + collectionDateStart, + collectionGranularity, + cost, + dataSource, + description, + documentationLink, + embeddedNotebookFileInfos, + fileIds, + geographicalCoverage, + license, + pricingModel, + privacyPolicyLink, + size, + supportLink, + tags, + termsOfService, + updateFrequency); + } + + @Override + public String toString() { + return new ToStringer(ListingDetail.class) + .add("assets", assets) + .add("collectionDateEnd", collectionDateEnd) + .add("collectionDateStart", collectionDateStart) + .add("collectionGranularity", collectionGranularity) + .add("cost", cost) + .add("dataSource", dataSource) + .add("description", description) + .add("documentationLink", documentationLink) + .add("embeddedNotebookFileInfos", embeddedNotebookFileInfos) + .add("fileIds", 
fileIds) + .add("geographicalCoverage", geographicalCoverage) + .add("license", license) + .add("pricingModel", pricingModel) + .add("privacyPolicyLink", privacyPolicyLink) + .add("size", size) + .add("supportLink", supportLink) + .add("tags", tags) + .add("termsOfService", termsOfService) + .add("updateFrequency", updateFrequency) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingFulfillment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingFulfillment.java new file mode 100755 index 000000000..2fc0506cd --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingFulfillment.java @@ -0,0 +1,104 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class ListingFulfillment { + /** */ + @JsonProperty("fulfillment_type") + private FulfillmentType fulfillmentType; + + /** */ + @JsonProperty("listing_id") + private String listingId; + + /** */ + @JsonProperty("recipient_type") + private DeltaSharingRecipientType recipientType; + + /** */ + @JsonProperty("repo_info") + private RepoInfo repoInfo; + + /** */ + @JsonProperty("share_info") + private ShareInfo shareInfo; + + public ListingFulfillment setFulfillmentType(FulfillmentType fulfillmentType) { + this.fulfillmentType = fulfillmentType; + return this; + } + + public FulfillmentType getFulfillmentType() { + return fulfillmentType; + } + + public ListingFulfillment setListingId(String listingId) { + this.listingId = listingId; + return this; + } + + public String getListingId() { + return listingId; + } + + public ListingFulfillment setRecipientType(DeltaSharingRecipientType recipientType) { + 
this.recipientType = recipientType; + return this; + } + + public DeltaSharingRecipientType getRecipientType() { + return recipientType; + } + + public ListingFulfillment setRepoInfo(RepoInfo repoInfo) { + this.repoInfo = repoInfo; + return this; + } + + public RepoInfo getRepoInfo() { + return repoInfo; + } + + public ListingFulfillment setShareInfo(ShareInfo shareInfo) { + this.shareInfo = shareInfo; + return this; + } + + public ShareInfo getShareInfo() { + return shareInfo; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListingFulfillment that = (ListingFulfillment) o; + return Objects.equals(fulfillmentType, that.fulfillmentType) + && Objects.equals(listingId, that.listingId) + && Objects.equals(recipientType, that.recipientType) + && Objects.equals(repoInfo, that.repoInfo) + && Objects.equals(shareInfo, that.shareInfo); + } + + @Override + public int hashCode() { + return Objects.hash(fulfillmentType, listingId, recipientType, repoInfo, shareInfo); + } + + @Override + public String toString() { + return new ToStringer(ListingFulfillment.class) + .add("fulfillmentType", fulfillmentType) + .add("listingId", listingId) + .add("recipientType", recipientType) + .add("repoInfo", repoInfo) + .add("shareInfo", shareInfo) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingSetting.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingSetting.java new file mode 100755 index 000000000..34c8e969c --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingSetting.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class ListingSetting { + /** filters are joined with `or` conjunction. */ + @JsonProperty("filters") + private Collection filters; + + /** */ + @JsonProperty("visibility") + private Visibility visibility; + + public ListingSetting setFilters(Collection filters) { + this.filters = filters; + return this; + } + + public Collection getFilters() { + return filters; + } + + public ListingSetting setVisibility(Visibility visibility) { + this.visibility = visibility; + return this; + } + + public Visibility getVisibility() { + return visibility; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ListingSetting that = (ListingSetting) o; + return Objects.equals(filters, that.filters) && Objects.equals(visibility, that.visibility); + } + + @Override + public int hashCode() { + return Objects.hash(filters, visibility); + } + + @Override + public String toString() { + return new ToStringer(ListingSetting.class) + .add("filters", filters) + .add("visibility", visibility) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingShareType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingShareType.java new file mode 100755 index 000000000..f63a807c1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ListingShareType.java @@ -0,0 +1,11 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
package com.databricks.sdk.service.marketplace;

import com.databricks.sdk.support.Generated;

/** Share type of a listing: the full share or a sample of it. */
@Generated
public enum ListingShareType {
  FULL,
  SAMPLE,
}

// ===== ListingStatus.java (separate generated file; file boundary collapsed in this chunk) =====
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.marketplace;

import com.databricks.sdk.support.Generated;

/** Publication status of a listing (DRAFT, PENDING, PUBLISHED, SUSPENDED). */
@Generated
public enum ListingStatus {
  DRAFT,
  PENDING,
  PUBLISHED,
  SUSPENDED,
}
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.marketplace;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Collection;
import java.util.Objects;

/**
 * Summary view of a marketplace listing (name, provider, status, share/repo source, audit
 * fields).
 *
 * <p>Next Number: 26
 */
@Generated
public class ListingSummary {
  /**
   * NOTE(review): element type of this collection appears stripped by extraction (raw {@code
   * Collection}) — confirm against the generator output.
   */
  @JsonProperty("categories")
  private Collection categories;

  /** */
  @JsonProperty("created_at")
  private Long createdAt;

  /** */
  @JsonProperty("created_by")
  private String createdBy;

  /** */
  @JsonProperty("created_by_id")
  private Long createdById;

  /**
   * NOTE(review): raw {@code Collection} — element type stripped by extraction; presumably
   * exchange IDs, confirm against the generator output.
   */
  @JsonProperty("exchange_ids")
  private Collection exchangeIds;

  /**
   * if a git repo is being created, a listing will be initialized with this field as opposed to a
   * share
   */
  @JsonProperty("git_repo")
  private RepoInfo gitRepo;

  /**
   * NOTE(review): wire name is camelCase ("listingType"), unlike the snake_case siblings — this is
   * how it was generated; do not "fix" without checking the OpenAPI spec.
   */
  @JsonProperty("listingType")
  private ListingType listingType;

  /** */
  @JsonProperty("metastore_id")
  private String metastoreId;

  /** */
  @JsonProperty("name")
  private String name;

  /** */
  @JsonProperty("provider_id")
  private String providerId;

  /** */
  @JsonProperty("provider_region")
  private RegionInfo providerRegion;

  /** */
  @JsonProperty("published_at")
  private Long publishedAt;

  /** */
  @JsonProperty("published_by")
  private String publishedBy;

  /** */
  @JsonProperty("setting")
  private ListingSetting setting;

  /** */
  @JsonProperty("share")
  private ShareInfo share;

  /** Enums */
  @JsonProperty("status")
  private ListingStatus status;

  /** */
  @JsonProperty("subtitle")
  private String subtitle;

  /** */
  @JsonProperty("updated_at")
  private Long updatedAt;

  /** */
  @JsonProperty("updated_by")
  private String updatedBy;

  /** */
  @JsonProperty("updated_by_id")
  private Long updatedById;

  // Fluent setters below: each assigns the field and returns {@code this} for chaining.

  public ListingSummary setCategories(Collection categories) {
    this.categories = categories;
    return this;
  }

  public Collection getCategories() {
    return categories;
  }

  public ListingSummary setCreatedAt(Long createdAt) {
    this.createdAt = createdAt;
    return this;
  }

  public Long getCreatedAt() {
    return createdAt;
  }

  public ListingSummary setCreatedBy(String createdBy) {
    this.createdBy = createdBy;
    return this;
  }

  public String getCreatedBy() {
    return createdBy;
  }

  public ListingSummary setCreatedById(Long createdById) {
    this.createdById = createdById;
    return this;
  }

  public Long getCreatedById() {
    return createdById;
  }

  public ListingSummary setExchangeIds(Collection exchangeIds) {
    this.exchangeIds = exchangeIds;
    return this;
  }

  public Collection getExchangeIds() {
    return exchangeIds;
  }

  public ListingSummary setGitRepo(RepoInfo gitRepo) {
    this.gitRepo = gitRepo;
    return this;
  }

  public RepoInfo getGitRepo() {
    return gitRepo;
  }

  public ListingSummary setListingType(ListingType listingType) {
    this.listingType = listingType;
    return this;
  }

  public ListingType getListingType() {
    return listingType;
  }

  public ListingSummary setMetastoreId(String metastoreId) {
    this.metastoreId = metastoreId;
    return this;
  }

  public String getMetastoreId() {
    return metastoreId;
  }

  public ListingSummary setName(String name) {
    this.name = name;
    return this;
  }

  public String getName() {
    return name;
  }

  public ListingSummary setProviderId(String providerId) {
    this.providerId = providerId;
    return this;
  }

  public String getProviderId() {
    return providerId;
  }

  public ListingSummary setProviderRegion(RegionInfo providerRegion) {
    this.providerRegion = providerRegion;
    return this;
  }

  public RegionInfo getProviderRegion() {
    return providerRegion;
  }

  public ListingSummary setPublishedAt(Long publishedAt) {
    this.publishedAt = publishedAt;
    return this;
  }

  public Long getPublishedAt() {
    return publishedAt;
  }

  public ListingSummary setPublishedBy(String publishedBy) {
    this.publishedBy = publishedBy;
    return this;
  }

  public String getPublishedBy() {
    return publishedBy;
  }

  public ListingSummary setSetting(ListingSetting setting) {
    this.setting = setting;
    return this;
  }

  public ListingSetting getSetting() {
    return setting;
  }

  public ListingSummary setShare(ShareInfo share) {
    this.share = share;
    return this;
  }

  public ShareInfo getShare() {
    return share;
  }

  public ListingSummary setStatus(ListingStatus status) {
    this.status = status;
    return this;
  }

  public ListingStatus getStatus() {
    return status;
  }

  public ListingSummary setSubtitle(String subtitle) {
    this.subtitle = subtitle;
    return this;
  }

  public String getSubtitle() {
    return subtitle;
  }

  public ListingSummary setUpdatedAt(Long updatedAt) {
    this.updatedAt = updatedAt;
    return this;
  }

  public Long getUpdatedAt() {
    return updatedAt;
  }

  public ListingSummary setUpdatedBy(String updatedBy) {
    this.updatedBy = updatedBy;
    return this;
  }

  public String getUpdatedBy() {
    return updatedBy;
  }

  public ListingSummary setUpdatedById(Long updatedById) {
    this.updatedById = updatedById;
    return this;
  }

  public Long getUpdatedById() {
    return updatedById;
  }

  /** Value equality over all fields (exact-class match, as generated). */
  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    ListingSummary that = (ListingSummary) o;
    return Objects.equals(categories, that.categories)
        && Objects.equals(createdAt, that.createdAt)
        && Objects.equals(createdBy, that.createdBy)
        && Objects.equals(createdById, that.createdById)
        && Objects.equals(exchangeIds, that.exchangeIds)
        && Objects.equals(gitRepo, that.gitRepo)
        && Objects.equals(listingType, that.listingType)
        && Objects.equals(metastoreId, that.metastoreId)
        && Objects.equals(name, that.name)
        && Objects.equals(providerId, that.providerId)
        && Objects.equals(providerRegion, that.providerRegion)
        && Objects.equals(publishedAt, that.publishedAt)
        && Objects.equals(publishedBy, that.publishedBy)
        && Objects.equals(setting, that.setting)
        && Objects.equals(share, that.share)
        && Objects.equals(status, that.status)
        && Objects.equals(subtitle, that.subtitle)
        && Objects.equals(updatedAt, that.updatedAt)
        && Objects.equals(updatedBy, that.updatedBy)
        && Objects.equals(updatedById, that.updatedById);
  }

  @Override
  public int hashCode() {
    return Objects.hash(
        categories,
        createdAt,
        createdBy,
        createdById,
        exchangeIds,
        gitRepo,
        listingType,
        metastoreId,
        name,
        providerId,
        providerRegion,
        publishedAt,
        publishedBy,
        setting,
        share,
        status,
        subtitle,
        updatedAt,
        updatedBy,
        updatedById);
  }

  @Override
  public String toString() {
    return new ToStringer(ListingSummary.class)
        .add("categories", categories)
        .add("createdAt", createdAt)
        .add("createdBy", createdBy)
        .add("createdById", createdById)
        .add("exchangeIds", exchangeIds)
        .add("gitRepo", gitRepo)
        .add("listingType", listingType)
        .add("metastoreId", metastoreId)
        .add("name", name)
        .add("providerId", providerId)
        .add("providerRegion", providerRegion)
        .add("publishedAt", publishedAt)
        .add("publishedBy", publishedBy)
        .add("setting", setting)
        .add("share", share)
        .add("status", status)
        .add("subtitle", subtitle)
        .add("updatedAt", updatedAt)
        .add("updatedBy", updatedBy)
        .add("updatedById", updatedById)
        .toString();
  }
}
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.marketplace;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Collection;
import java.util.Objects;

/** A tag on a listing: a typed tag name plus its string values. */
@Generated
public class ListingTag {
  /** Tag name (enum) */
  @JsonProperty("tag_name")
  private ListingTagType tagName;

  /**
   * String representation of the tag value. Values should be string literals (no complex types)
   *
   * <p>Type parameter restored: the extraction stripped the angle brackets, leaving a raw
   * {@code Collection}; the field's own contract ("string literals") fixes the element type.
   */
  @JsonProperty("tag_values")
  private Collection<String> tagValues;

  /** Fluent setter; returns {@code this} for chaining. */
  public ListingTag setTagName(ListingTagType tagName) {
    this.tagName = tagName;
    return this;
  }

  public ListingTagType getTagName() {
    return tagName;
  }

  /** Fluent setter; returns {@code this} for chaining. */
  public ListingTag setTagValues(Collection<String> tagValues) {
    this.tagValues = tagValues;
    return this;
  }

  public Collection<String> getTagValues() {
    return tagValues;
  }

  /** Value equality over both fields (exact-class match, as generated). */
  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    ListingTag that = (ListingTag) o;
    return Objects.equals(tagName, that.tagName) && Objects.equals(tagValues, that.tagValues);
  }

  @Override
  public int hashCode() {
    return Objects.hash(tagName, tagValues);
  }

  @Override
  public String toString() {
    return new ToStringer(ListingTag.class)
        .add("tagName", tagName)
        .add("tagValues", tagValues)
        .toString();
  }
}
package com.databricks.sdk.service.marketplace;

import com.databricks.sdk.support.Generated;

/** Kind of tag attached to a listing (language, task, or unspecified). */
@Generated
public enum ListingTagType {
  LISTING_TAG_TYPE_LANGUAGE,
  LISTING_TAG_TYPE_TASK,
  LISTING_TAG_TYPE_UNSPECIFIED,
}

// ===== ListingType.java (separate generated file; file boundary collapsed in this chunk) =====
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.marketplace;

import com.databricks.sdk.support.Generated;

/** Type of listing: personalized or standard. */
@Generated
public enum ListingType {
  PERSONALIZED,
  STANDARD,
}

// ===== MarketplaceFileType.java (separate generated file; boundary collapsed in this chunk) =====
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.marketplace;

import com.databricks.sdk.support.Generated;

/** Kind of file stored for a marketplace listing (embedded notebook or provider icon). */
@Generated
public enum MarketplaceFileType {
  EMBEDDED_NOTEBOOK,
  PROVIDER_ICON,
}
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.marketplace;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

/**
 * A consumer's request to personalize a listing (data request or installation), including the
 * consumer's contact info, intended use, and the request's fulfillment status.
 */
@Generated
public class PersonalizationRequest {
  /** */
  @JsonProperty("comment")
  private String comment;

  /** */
  @JsonProperty("consumer_region")
  private RegionInfo consumerRegion;

  /** contact info for the consumer requesting data or performing a listing installation */
  @JsonProperty("contact_info")
  private ContactInfo contactInfo;

  /** */
  @JsonProperty("created_at")
  private Long createdAt;

  /** */
  @JsonProperty("id")
  private String id;

  /** */
  @JsonProperty("intended_use")
  private String intendedUse;

  /** */
  @JsonProperty("is_from_lighthouse")
  private Boolean isFromLighthouse;

  /** */
  @JsonProperty("listing_id")
  private String listingId;

  /** */
  @JsonProperty("listing_name")
  private String listingName;

  /** */
  @JsonProperty("metastore_id")
  private String metastoreId;

  /** */
  @JsonProperty("provider_id")
  private String providerId;

  /** */
  @JsonProperty("recipient_type")
  private DeltaSharingRecipientType recipientType;

  /** */
  @JsonProperty("share")
  private ShareInfo share;

  /** */
  @JsonProperty("status")
  private PersonalizationRequestStatus status;

  /** */
  @JsonProperty("status_message")
  private String statusMessage;

  /** */
  @JsonProperty("updated_at")
  private Long updatedAt;

  // Fluent setters below: each assigns the field and returns {@code this} for chaining.

  public PersonalizationRequest setComment(String comment) {
    this.comment = comment;
    return this;
  }

  public String getComment() {
    return comment;
  }

  public PersonalizationRequest setConsumerRegion(RegionInfo consumerRegion) {
    this.consumerRegion = consumerRegion;
    return this;
  }

  public RegionInfo getConsumerRegion() {
    return consumerRegion;
  }

  public PersonalizationRequest setContactInfo(ContactInfo contactInfo) {
    this.contactInfo = contactInfo;
    return this;
  }

  public ContactInfo getContactInfo() {
    return contactInfo;
  }

  public PersonalizationRequest setCreatedAt(Long createdAt) {
    this.createdAt = createdAt;
    return this;
  }

  public Long getCreatedAt() {
    return createdAt;
  }

  public PersonalizationRequest setId(String id) {
    this.id = id;
    return this;
  }

  public String getId() {
    return id;
  }

  public PersonalizationRequest setIntendedUse(String intendedUse) {
    this.intendedUse = intendedUse;
    return this;
  }

  public String getIntendedUse() {
    return intendedUse;
  }

  public PersonalizationRequest setIsFromLighthouse(Boolean isFromLighthouse) {
    this.isFromLighthouse = isFromLighthouse;
    return this;
  }

  public Boolean getIsFromLighthouse() {
    return isFromLighthouse;
  }

  public PersonalizationRequest setListingId(String listingId) {
    this.listingId = listingId;
    return this;
  }

  public String getListingId() {
    return listingId;
  }

  public PersonalizationRequest setListingName(String listingName) {
    this.listingName = listingName;
    return this;
  }

  public String getListingName() {
    return listingName;
  }

  public PersonalizationRequest setMetastoreId(String metastoreId) {
    this.metastoreId = metastoreId;
    return this;
  }

  public String getMetastoreId() {
    return metastoreId;
  }

  public PersonalizationRequest setProviderId(String providerId) {
    this.providerId = providerId;
    return this;
  }

  public String getProviderId() {
    return providerId;
  }

  public PersonalizationRequest setRecipientType(DeltaSharingRecipientType recipientType) {
    this.recipientType = recipientType;
    return this;
  }

  public DeltaSharingRecipientType getRecipientType() {
    return recipientType;
  }

  public PersonalizationRequest setShare(ShareInfo share) {
    this.share = share;
    return this;
  }

  public ShareInfo getShare() {
    return share;
  }

  public PersonalizationRequest setStatus(PersonalizationRequestStatus status) {
    this.status = status;
    return this;
  }

  public PersonalizationRequestStatus getStatus() {
    return status;
  }

  public PersonalizationRequest setStatusMessage(String statusMessage) {
    this.statusMessage = statusMessage;
    return this;
  }

  public String getStatusMessage() {
    return statusMessage;
  }

  public PersonalizationRequest setUpdatedAt(Long updatedAt) {
    this.updatedAt = updatedAt;
    return this;
  }

  public Long getUpdatedAt() {
    return updatedAt;
  }

  /** Value equality over all fields (exact-class match, as generated). */
  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    PersonalizationRequest that = (PersonalizationRequest) o;
    return Objects.equals(comment, that.comment)
        && Objects.equals(consumerRegion, that.consumerRegion)
        && Objects.equals(contactInfo, that.contactInfo)
        && Objects.equals(createdAt, that.createdAt)
        && Objects.equals(id, that.id)
        && Objects.equals(intendedUse, that.intendedUse)
        && Objects.equals(isFromLighthouse, that.isFromLighthouse)
        && Objects.equals(listingId, that.listingId)
        && Objects.equals(listingName, that.listingName)
        && Objects.equals(metastoreId, that.metastoreId)
        && Objects.equals(providerId, that.providerId)
        && Objects.equals(recipientType, that.recipientType)
        && Objects.equals(share, that.share)
        && Objects.equals(status, that.status)
        && Objects.equals(statusMessage, that.statusMessage)
        && Objects.equals(updatedAt, that.updatedAt);
  }

  @Override
  public int hashCode() {
    return Objects.hash(
        comment,
        consumerRegion,
        contactInfo,
        createdAt,
        id,
        intendedUse,
        isFromLighthouse,
        listingId,
        listingName,
        metastoreId,
        providerId,
        recipientType,
        share,
        status,
        statusMessage,
        updatedAt);
  }

  @Override
  public String toString() {
    return new ToStringer(PersonalizationRequest.class)
        .add("comment", comment)
        .add("consumerRegion", consumerRegion)
        .add("contactInfo", contactInfo)
        .add("createdAt", createdAt)
        .add("id", id)
        .add("intendedUse", intendedUse)
        .add("isFromLighthouse", isFromLighthouse)
        .add("listingId", listingId)
        .add("listingName", listingName)
        .add("metastoreId", metastoreId)
        .add("providerId", providerId)
        .add("recipientType", recipientType)
        .add("share", share)
        .add("status", status)
        .add("statusMessage", statusMessage)
        .add("updatedAt", updatedAt)
        .toString();
  }
}

// ===== PersonalizationRequestStatus.java (separate generated file; boundary collapsed) =====
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.marketplace;

import com.databricks.sdk.support.Generated;

/** Fulfillment status of a personalization request. */
@Generated
public enum PersonalizationRequestStatus {
  DENIED,
  FULFILLED,
  NEW,
  REQUEST_PENDING,
}
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.marketplace;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

/** Identifies a provider analytics dashboard by its ID. */
@Generated
public class ProviderAnalyticsDashboard {
  /** Dashboard identifier. */
  @JsonProperty("id")
  private String id;

  /** Fluent setter; returns {@code this} for chaining. */
  public ProviderAnalyticsDashboard setId(String id) {
    this.id = id;
    return this;
  }

  public String getId() {
    return id;
  }

  /** Value equality on the single {@code id} field (exact-class match, as generated). */
  @Override
  public boolean equals(Object o) {
    if (o == this) {
      return true;
    }
    if (o == null) {
      return false;
    }
    if (!getClass().equals(o.getClass())) {
      return false;
    }
    ProviderAnalyticsDashboard other = (ProviderAnalyticsDashboard) o;
    return Objects.equals(id, other.id);
  }

  @Override
  public int hashCode() {
    return Objects.hash(id);
  }

  @Override
  public String toString() {
    return new ToStringer(ProviderAnalyticsDashboard.class).add("id", id).toString();
  }
}
*/ +@Generated +public class ProviderExchangeFiltersAPI { + private static final Logger LOG = LoggerFactory.getLogger(ProviderExchangeFiltersAPI.class); + + private final ProviderExchangeFiltersService impl; + + /** Regular-use constructor */ + public ProviderExchangeFiltersAPI(ApiClient apiClient) { + impl = new ProviderExchangeFiltersImpl(apiClient); + } + + /** Constructor for mocks */ + public ProviderExchangeFiltersAPI(ProviderExchangeFiltersService mock) { + impl = mock; + } + + public CreateExchangeFilterResponse create(ExchangeFilter filter) { + return create(new CreateExchangeFilterRequest().setFilter(filter)); + } + + /** + * Create a new exchange filter. + * + *

Add an exchange filter. + */ + public CreateExchangeFilterResponse create(CreateExchangeFilterRequest request) { + return impl.create(request); + } + + public void delete(String id) { + delete(new DeleteExchangeFilterRequest().setId(id)); + } + + /** + * Delete an exchange filter. + * + *

Delete an exchange filter + */ + public void delete(DeleteExchangeFilterRequest request) { + impl.delete(request); + } + + public Iterable list(String exchangeId) { + return list(new ListExchangeFiltersRequest().setExchangeId(exchangeId)); + } + + /** + * List exchange filters. + * + *

List exchange filter + */ + public Iterable list(ListExchangeFiltersRequest request) { + return new Paginator<>( + request, + impl::list, + ListExchangeFiltersResponse::getFilters, + response -> { + String token = response.getNextPageToken(); + if (token == null) { + return null; + } + return request.setPageToken(token); + }); + } + + public UpdateExchangeFilterResponse update(String id, ExchangeFilter filter) { + return update(new UpdateExchangeFilterRequest().setId(id).setFilter(filter)); + } + + /** + * Update exchange filter. + * + *

Update an exchange filter. + */ + public UpdateExchangeFilterResponse update(UpdateExchangeFilterRequest request) { + return impl.update(request); + } + + public ProviderExchangeFiltersService impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangeFiltersImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangeFiltersImpl.java new file mode 100755 index 000000000..19ca56ec5 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangeFiltersImpl.java @@ -0,0 +1,51 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import java.util.HashMap; +import java.util.Map; + +/** Package-local implementation of ProviderExchangeFilters */ +@Generated +class ProviderExchangeFiltersImpl implements ProviderExchangeFiltersService { + private final ApiClient apiClient; + + public ProviderExchangeFiltersImpl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public CreateExchangeFilterResponse create(CreateExchangeFilterRequest request) { + String path = "/api/2.0/marketplace-exchange/filters"; + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + headers.put("Content-Type", "application/json"); + return apiClient.POST(path, request, CreateExchangeFilterResponse.class, headers); + } + + @Override + public void delete(DeleteExchangeFilterRequest request) { + String path = String.format("/api/2.0/marketplace-exchange/filters/%s", request.getId()); + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + apiClient.DELETE(path, request, DeleteExchangeFilterResponse.class, headers); + } + + @Override + public ListExchangeFiltersResponse list(ListExchangeFiltersRequest request) { + 
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
package com.databricks.sdk.service.marketplace;

import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.support.Generated;
import java.util.HashMap;
import java.util.Map;

/**
 * Package-local implementation of ProviderExchangeFilters.
 *
 * <p>Header maps are declared {@code Map<String, String>} — the extraction had stripped the type
 * arguments, leaving raw types.
 */
@Generated
class ProviderExchangeFiltersImpl implements ProviderExchangeFiltersService {
  private final ApiClient apiClient;

  public ProviderExchangeFiltersImpl(ApiClient apiClient) {
    this.apiClient = apiClient;
  }

  @Override
  public CreateExchangeFilterResponse create(CreateExchangeFilterRequest request) {
    String path = "/api/2.0/marketplace-exchange/filters";
    Map<String, String> headers = new HashMap<>();
    headers.put("Accept", "application/json");
    headers.put("Content-Type", "application/json");
    return apiClient.POST(path, request, CreateExchangeFilterResponse.class, headers);
  }

  @Override
  public void delete(DeleteExchangeFilterRequest request) {
    String path = String.format("/api/2.0/marketplace-exchange/filters/%s", request.getId());
    Map<String, String> headers = new HashMap<>();
    headers.put("Accept", "application/json");
    apiClient.DELETE(path, request, DeleteExchangeFilterResponse.class, headers);
  }

  @Override
  public ListExchangeFiltersResponse list(ListExchangeFiltersRequest request) {
    String path = "/api/2.0/marketplace-exchange/filters";
    Map<String, String> headers = new HashMap<>();
    headers.put("Accept", "application/json");
    return apiClient.GET(path, request, ListExchangeFiltersResponse.class, headers);
  }

  @Override
  public UpdateExchangeFilterResponse update(UpdateExchangeFilterRequest request) {
    String path = String.format("/api/2.0/marketplace-exchange/filters/%s", request.getId());
    Map<String, String> headers = new HashMap<>();
    headers.put("Accept", "application/json");
    headers.put("Content-Type", "application/json");
    return apiClient.PUT(path, request, UpdateExchangeFilterResponse.class, headers);
  }
}

/**
 * Marketplace exchanges filters curate which groups can access an exchange.
 *
 * <p>This is the high-level interface, that contains generated methods.
 *
 * <p>Evolving: this interface is under development. Method signatures may change.
 */
@Generated
public interface ProviderExchangeFiltersService {
  /**
   * Create a new exchange filter.
   *
   * <p>Add an exchange filter.
   */
  CreateExchangeFilterResponse create(CreateExchangeFilterRequest createExchangeFilterRequest);

  /**
   * Delete an exchange filter.
   *
   * <p>Delete an exchange filter
   */
  void delete(DeleteExchangeFilterRequest deleteExchangeFilterRequest);

  /**
   * List exchange filters.
   *
   * <p>List exchange filter
   */
  ListExchangeFiltersResponse list(ListExchangeFiltersRequest listExchangeFiltersRequest);

  /**
   * Update exchange filter.
   *
   * <p>Update an exchange filter.
   */
  UpdateExchangeFilterResponse update(UpdateExchangeFilterRequest updateExchangeFilterRequest);
}

Update an exchange filter. + */ + UpdateExchangeFilterResponse update(UpdateExchangeFilterRequest updateExchangeFilterRequest); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangesAPI.java new file mode 100755 index 000000000..84954c14e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangesAPI.java @@ -0,0 +1,176 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Marketplace exchanges allow providers to share their listings with a curated set of customers. + */ +@Generated +public class ProviderExchangesAPI { + private static final Logger LOG = LoggerFactory.getLogger(ProviderExchangesAPI.class); + + private final ProviderExchangesService impl; + + /** Regular-use constructor */ + public ProviderExchangesAPI(ApiClient apiClient) { + impl = new ProviderExchangesImpl(apiClient); + } + + /** Constructor for mocks */ + public ProviderExchangesAPI(ProviderExchangesService mock) { + impl = mock; + } + + public AddExchangeForListingResponse addListingToExchange(String listingId, String exchangeId) { + return addListingToExchange( + new AddExchangeForListingRequest().setListingId(listingId).setExchangeId(exchangeId)); + } + + /** + * Add an exchange for listing. + * + *

Associate an exchange with a listing + */ + public AddExchangeForListingResponse addListingToExchange(AddExchangeForListingRequest request) { + return impl.addListingToExchange(request); + } + + public CreateExchangeResponse create(Exchange exchange) { + return create(new CreateExchangeRequest().setExchange(exchange)); + } + + /** + * Create an exchange. + * + *

Create an exchange + */ + public CreateExchangeResponse create(CreateExchangeRequest request) { + return impl.create(request); + } + + public void delete(String id) { + delete(new DeleteExchangeRequest().setId(id)); + } + + /** + * Delete an exchange. + * + *

This removes a listing from marketplace. + */ + public void delete(DeleteExchangeRequest request) { + impl.delete(request); + } + + public void deleteListingFromExchange(String id) { + deleteListingFromExchange(new RemoveExchangeForListingRequest().setId(id)); + } + + /** + * Remove an exchange for listing. + * + *

Disassociate an exchange with a listing + */ + public void deleteListingFromExchange(RemoveExchangeForListingRequest request) { + impl.deleteListingFromExchange(request); + } + + public GetExchangeResponse get(String id) { + return get(new GetExchangeRequest().setId(id)); + } + + /** + * Get an exchange. + * + *

Get an exchange. + */ + public GetExchangeResponse get(GetExchangeRequest request) { + return impl.get(request); + } + + /** + * List exchanges. + * + *

List exchanges visible to provider + */ + public Iterable list(ListExchangesRequest request) { + return new Paginator<>( + request, + impl::list, + ListExchangesResponse::getExchanges, + response -> { + String token = response.getNextPageToken(); + if (token == null) { + return null; + } + return request.setPageToken(token); + }); + } + + public Iterable listExchangesForListing(String listingId) { + return listExchangesForListing(new ListExchangesForListingRequest().setListingId(listingId)); + } + + /** + * List exchanges for listing. + * + *

List exchanges associated with a listing + */ + public Iterable listExchangesForListing(ListExchangesForListingRequest request) { + return new Paginator<>( + request, + impl::listExchangesForListing, + ListExchangesForListingResponse::getExchangeListing, + response -> { + String token = response.getNextPageToken(); + if (token == null) { + return null; + } + return request.setPageToken(token); + }); + } + + public Iterable listListingsForExchange(String exchangeId) { + return listListingsForExchange(new ListListingsForExchangeRequest().setExchangeId(exchangeId)); + } + + /** + * List listings for exchange. + * + *

List listings associated with an exchange + */ + public Iterable listListingsForExchange(ListListingsForExchangeRequest request) { + return new Paginator<>( + request, + impl::listListingsForExchange, + ListListingsForExchangeResponse::getExchangeListings, + response -> { + String token = response.getNextPageToken(); + if (token == null) { + return null; + } + return request.setPageToken(token); + }); + } + + public UpdateExchangeResponse update(String id, Exchange exchange) { + return update(new UpdateExchangeRequest().setId(id).setExchange(exchange)); + } + + /** + * Update exchange. + * + *

Update an exchange + */ + public UpdateExchangeResponse update(UpdateExchangeRequest request) { + return impl.update(request); + } + + public ProviderExchangesService impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangesImpl.java new file mode 100755 index 000000000..2c47a19e8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangesImpl.java @@ -0,0 +1,95 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import java.util.HashMap; +import java.util.Map; + +/** Package-local implementation of ProviderExchanges */ +@Generated +class ProviderExchangesImpl implements ProviderExchangesService { + private final ApiClient apiClient; + + public ProviderExchangesImpl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public AddExchangeForListingResponse addListingToExchange(AddExchangeForListingRequest request) { + String path = "/api/2.0/marketplace-exchange/exchanges-for-listing"; + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + headers.put("Content-Type", "application/json"); + return apiClient.POST(path, request, AddExchangeForListingResponse.class, headers); + } + + @Override + public CreateExchangeResponse create(CreateExchangeRequest request) { + String path = "/api/2.0/marketplace-exchange/exchanges"; + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + headers.put("Content-Type", "application/json"); + return apiClient.POST(path, request, CreateExchangeResponse.class, headers); + } + + @Override + public void delete(DeleteExchangeRequest request) { + String path = 
String.format("/api/2.0/marketplace-exchange/exchanges/%s", request.getId()); + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + apiClient.DELETE(path, request, DeleteExchangeResponse.class, headers); + } + + @Override + public void deleteListingFromExchange(RemoveExchangeForListingRequest request) { + String path = + String.format("/api/2.0/marketplace-exchange/exchanges-for-listing/%s", request.getId()); + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + apiClient.DELETE(path, request, RemoveExchangeForListingResponse.class, headers); + } + + @Override + public GetExchangeResponse get(GetExchangeRequest request) { + String path = String.format("/api/2.0/marketplace-exchange/exchanges/%s", request.getId()); + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + return apiClient.GET(path, request, GetExchangeResponse.class, headers); + } + + @Override + public ListExchangesResponse list(ListExchangesRequest request) { + String path = "/api/2.0/marketplace-exchange/exchanges"; + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + return apiClient.GET(path, request, ListExchangesResponse.class, headers); + } + + @Override + public ListExchangesForListingResponse listExchangesForListing( + ListExchangesForListingRequest request) { + String path = "/api/2.0/marketplace-exchange/exchanges-for-listing"; + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + return apiClient.GET(path, request, ListExchangesForListingResponse.class, headers); + } + + @Override + public ListListingsForExchangeResponse listListingsForExchange( + ListListingsForExchangeRequest request) { + String path = "/api/2.0/marketplace-exchange/listings-for-exchange"; + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + return apiClient.GET(path, request, ListListingsForExchangeResponse.class, headers); + } + + @Override + public 
UpdateExchangeResponse update(UpdateExchangeRequest request) { + String path = String.format("/api/2.0/marketplace-exchange/exchanges/%s", request.getId()); + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + headers.put("Content-Type", "application/json"); + return apiClient.PUT(path, request, UpdateExchangeResponse.class, headers); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangesService.java new file mode 100755 index 000000000..50b789076 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderExchangesService.java @@ -0,0 +1,80 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; + +/** + * Marketplace exchanges allow providers to share their listings with a curated set of customers. + * + *

This is the high-level interface, that contains generated methods. + * + *

Evolving: this interface is under development. Method signatures may change. + */ +@Generated +public interface ProviderExchangesService { + /** + * Add an exchange for listing. + * + *

Associate an exchange with a listing + */ + AddExchangeForListingResponse addListingToExchange( + AddExchangeForListingRequest addExchangeForListingRequest); + + /** + * Create an exchange. + * + *

Create an exchange + */ + CreateExchangeResponse create(CreateExchangeRequest createExchangeRequest); + + /** + * Delete an exchange. + * + *

This removes a listing from marketplace. + */ + void delete(DeleteExchangeRequest deleteExchangeRequest); + + /** + * Remove an exchange for listing. + * + *

Disassociate an exchange with a listing + */ + void deleteListingFromExchange(RemoveExchangeForListingRequest removeExchangeForListingRequest); + + /** + * Get an exchange. + * + *

Get an exchange. + */ + GetExchangeResponse get(GetExchangeRequest getExchangeRequest); + + /** + * List exchanges. + * + *

List exchanges visible to provider + */ + ListExchangesResponse list(ListExchangesRequest listExchangesRequest); + + /** + * List exchanges for listing. + * + *

List exchanges associated with a listing + */ + ListExchangesForListingResponse listExchangesForListing( + ListExchangesForListingRequest listExchangesForListingRequest); + + /** + * List listings for exchange. + * + *

List listings associated with an exchange + */ + ListListingsForExchangeResponse listListingsForExchange( + ListListingsForExchangeRequest listListingsForExchangeRequest); + + /** + * Update exchange. + * + *

Update an exchange + */ + UpdateExchangeResponse update(UpdateExchangeRequest updateExchangeRequest); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderFilesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderFilesAPI.java new file mode 100755 index 000000000..cf27c77ec --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderFilesAPI.java @@ -0,0 +1,100 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Marketplace offers a set of file APIs for various purposes such as preview notebooks and provider + * icons. + */ +@Generated +public class ProviderFilesAPI { + private static final Logger LOG = LoggerFactory.getLogger(ProviderFilesAPI.class); + + private final ProviderFilesService impl; + + /** Regular-use constructor */ + public ProviderFilesAPI(ApiClient apiClient) { + impl = new ProviderFilesImpl(apiClient); + } + + /** Constructor for mocks */ + public ProviderFilesAPI(ProviderFilesService mock) { + impl = mock; + } + + public CreateFileResponse create( + FileParent fileParent, MarketplaceFileType marketplaceFileType, String mimeType) { + return create( + new CreateFileRequest() + .setFileParent(fileParent) + .setMarketplaceFileType(marketplaceFileType) + .setMimeType(mimeType)); + } + + /** + * Create a file. + * + *

Create a file. Currently, only provider icons and attached notebooks are supported. + */ + public CreateFileResponse create(CreateFileRequest request) { + return impl.create(request); + } + + public void delete(String fileId) { + delete(new DeleteFileRequest().setFileId(fileId)); + } + + /** + * Delete a file. + * + *

Delete a file + */ + public void delete(DeleteFileRequest request) { + impl.delete(request); + } + + public GetFileResponse get(String fileId) { + return get(new GetFileRequest().setFileId(fileId)); + } + + /** + * Get a file. + * + *

Get a file + */ + public GetFileResponse get(GetFileRequest request) { + return impl.get(request); + } + + public Iterable list(FileParent fileParent) { + return list(new ListFilesRequest().setFileParent(fileParent)); + } + + /** + * List files. + * + *

List files attached to a parent entity. + */ + public Iterable list(ListFilesRequest request) { + return new Paginator<>( + request, + impl::list, + ListFilesResponse::getFileInfos, + response -> { + String token = response.getNextPageToken(); + if (token == null) { + return null; + } + return request.setPageToken(token); + }); + } + + public ProviderFilesService impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderFilesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderFilesImpl.java new file mode 100755 index 000000000..df582e455 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderFilesImpl.java @@ -0,0 +1,50 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import java.util.HashMap; +import java.util.Map; + +/** Package-local implementation of ProviderFiles */ +@Generated +class ProviderFilesImpl implements ProviderFilesService { + private final ApiClient apiClient; + + public ProviderFilesImpl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public CreateFileResponse create(CreateFileRequest request) { + String path = "/api/2.0/marketplace-provider/files"; + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + headers.put("Content-Type", "application/json"); + return apiClient.POST(path, request, CreateFileResponse.class, headers); + } + + @Override + public void delete(DeleteFileRequest request) { + String path = String.format("/api/2.0/marketplace-provider/files/%s", request.getFileId()); + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + apiClient.DELETE(path, request, DeleteFileResponse.class, headers); + } + + @Override + public GetFileResponse 
get(GetFileRequest request) { + String path = String.format("/api/2.0/marketplace-provider/files/%s", request.getFileId()); + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + return apiClient.GET(path, request, GetFileResponse.class, headers); + } + + @Override + public ListFilesResponse list(ListFilesRequest request) { + String path = "/api/2.0/marketplace-provider/files"; + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + return apiClient.GET(path, request, ListFilesResponse.class, headers); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderFilesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderFilesService.java new file mode 100755 index 000000000..79d307ec2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderFilesService.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; + +/** + * Marketplace offers a set of file APIs for various purposes such as preview notebooks and provider + * icons. + * + *

This is the high-level interface, that contains generated methods. + * + *

Evolving: this interface is under development. Method signatures may change. + */ +@Generated +public interface ProviderFilesService { + /** + * Create a file. + * + *

Create a file. Currently, only provider icons and attached notebooks are supported. + */ + CreateFileResponse create(CreateFileRequest createFileRequest); + + /** + * Delete a file. + * + *

Delete a file + */ + void delete(DeleteFileRequest deleteFileRequest); + + /** + * Get a file. + * + *

Get a file + */ + GetFileResponse get(GetFileRequest getFileRequest); + + /** + * List files. + * + *

List files attached to a parent entity. + */ + ListFilesResponse list(ListFilesRequest listFilesRequest); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderInfo.java new file mode 100755 index 000000000..4db7dd65e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderInfo.java @@ -0,0 +1,253 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class ProviderInfo { + /** */ + @JsonProperty("business_contact_email") + private String businessContactEmail; + + /** */ + @JsonProperty("company_website_link") + private String companyWebsiteLink; + + /** */ + @JsonProperty("dark_mode_icon_file_id") + private String darkModeIconFileId; + + /** */ + @JsonProperty("dark_mode_icon_file_path") + private String darkModeIconFilePath; + + /** */ + @JsonProperty("description") + private String description; + + /** */ + @JsonProperty("icon_file_id") + private String iconFileId; + + /** */ + @JsonProperty("icon_file_path") + private String iconFilePath; + + /** */ + @JsonProperty("id") + private String id; + + /** is_featured is accessible by consumers only */ + @JsonProperty("is_featured") + private Boolean isFeatured; + + /** */ + @JsonProperty("name") + private String name; + + /** */ + @JsonProperty("privacy_policy_link") + private String privacyPolicyLink; + + /** published_by is only applicable to data aggregators (e.g. 
Crux) */ + @JsonProperty("published_by") + private String publishedBy; + + /** */ + @JsonProperty("support_contact_email") + private String supportContactEmail; + + /** */ + @JsonProperty("term_of_service_link") + private String termOfServiceLink; + + public ProviderInfo setBusinessContactEmail(String businessContactEmail) { + this.businessContactEmail = businessContactEmail; + return this; + } + + public String getBusinessContactEmail() { + return businessContactEmail; + } + + public ProviderInfo setCompanyWebsiteLink(String companyWebsiteLink) { + this.companyWebsiteLink = companyWebsiteLink; + return this; + } + + public String getCompanyWebsiteLink() { + return companyWebsiteLink; + } + + public ProviderInfo setDarkModeIconFileId(String darkModeIconFileId) { + this.darkModeIconFileId = darkModeIconFileId; + return this; + } + + public String getDarkModeIconFileId() { + return darkModeIconFileId; + } + + public ProviderInfo setDarkModeIconFilePath(String darkModeIconFilePath) { + this.darkModeIconFilePath = darkModeIconFilePath; + return this; + } + + public String getDarkModeIconFilePath() { + return darkModeIconFilePath; + } + + public ProviderInfo setDescription(String description) { + this.description = description; + return this; + } + + public String getDescription() { + return description; + } + + public ProviderInfo setIconFileId(String iconFileId) { + this.iconFileId = iconFileId; + return this; + } + + public String getIconFileId() { + return iconFileId; + } + + public ProviderInfo setIconFilePath(String iconFilePath) { + this.iconFilePath = iconFilePath; + return this; + } + + public String getIconFilePath() { + return iconFilePath; + } + + public ProviderInfo setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public ProviderInfo setIsFeatured(Boolean isFeatured) { + this.isFeatured = isFeatured; + return this; + } + + public Boolean getIsFeatured() { + return isFeatured; + } + + public ProviderInfo 
setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public ProviderInfo setPrivacyPolicyLink(String privacyPolicyLink) { + this.privacyPolicyLink = privacyPolicyLink; + return this; + } + + public String getPrivacyPolicyLink() { + return privacyPolicyLink; + } + + public ProviderInfo setPublishedBy(String publishedBy) { + this.publishedBy = publishedBy; + return this; + } + + public String getPublishedBy() { + return publishedBy; + } + + public ProviderInfo setSupportContactEmail(String supportContactEmail) { + this.supportContactEmail = supportContactEmail; + return this; + } + + public String getSupportContactEmail() { + return supportContactEmail; + } + + public ProviderInfo setTermOfServiceLink(String termOfServiceLink) { + this.termOfServiceLink = termOfServiceLink; + return this; + } + + public String getTermOfServiceLink() { + return termOfServiceLink; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ProviderInfo that = (ProviderInfo) o; + return Objects.equals(businessContactEmail, that.businessContactEmail) + && Objects.equals(companyWebsiteLink, that.companyWebsiteLink) + && Objects.equals(darkModeIconFileId, that.darkModeIconFileId) + && Objects.equals(darkModeIconFilePath, that.darkModeIconFilePath) + && Objects.equals(description, that.description) + && Objects.equals(iconFileId, that.iconFileId) + && Objects.equals(iconFilePath, that.iconFilePath) + && Objects.equals(id, that.id) + && Objects.equals(isFeatured, that.isFeatured) + && Objects.equals(name, that.name) + && Objects.equals(privacyPolicyLink, that.privacyPolicyLink) + && Objects.equals(publishedBy, that.publishedBy) + && Objects.equals(supportContactEmail, that.supportContactEmail) + && Objects.equals(termOfServiceLink, that.termOfServiceLink); + } + + @Override + public int hashCode() { + return Objects.hash( + businessContactEmail, 
+ companyWebsiteLink, + darkModeIconFileId, + darkModeIconFilePath, + description, + iconFileId, + iconFilePath, + id, + isFeatured, + name, + privacyPolicyLink, + publishedBy, + supportContactEmail, + termOfServiceLink); + } + + @Override + public String toString() { + return new ToStringer(ProviderInfo.class) + .add("businessContactEmail", businessContactEmail) + .add("companyWebsiteLink", companyWebsiteLink) + .add("darkModeIconFileId", darkModeIconFileId) + .add("darkModeIconFilePath", darkModeIconFilePath) + .add("description", description) + .add("iconFileId", iconFileId) + .add("iconFilePath", iconFilePath) + .add("id", id) + .add("isFeatured", isFeatured) + .add("name", name) + .add("privacyPolicyLink", privacyPolicyLink) + .add("publishedBy", publishedBy) + .add("supportContactEmail", supportContactEmail) + .add("termOfServiceLink", termOfServiceLink) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderListingsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderListingsAPI.java new file mode 100755 index 000000000..936d78d47 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderListingsAPI.java @@ -0,0 +1,104 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Listings are the core entities in the Marketplace. They represent the products that are available + * for consumption. 
+ */ +@Generated +public class ProviderListingsAPI { + private static final Logger LOG = LoggerFactory.getLogger(ProviderListingsAPI.class); + + private final ProviderListingsService impl; + + /** Regular-use constructor */ + public ProviderListingsAPI(ApiClient apiClient) { + impl = new ProviderListingsImpl(apiClient); + } + + /** Constructor for mocks */ + public ProviderListingsAPI(ProviderListingsService mock) { + impl = mock; + } + + public CreateListingResponse create(Listing listing) { + return create(new CreateListingRequest().setListing(listing)); + } + + /** + * Create a listing. + * + *

Create a new listing + */ + public CreateListingResponse create(CreateListingRequest request) { + return impl.create(request); + } + + public void delete(String id) { + delete(new DeleteListingRequest().setId(id)); + } + + /** + * Delete a listing. + * + *

Delete a listing + */ + public void delete(DeleteListingRequest request) { + impl.delete(request); + } + + public GetListingResponse get(String id) { + return get(new GetListingRequest().setId(id)); + } + + /** + * Get a listing. + * + *

Get a listing + */ + public GetListingResponse get(GetListingRequest request) { + return impl.get(request); + } + + /** + * List listings. + * + *

List listings owned by this provider + */ + public Iterable

list(GetListingsRequest request) { + return new Paginator<>( + request, + impl::list, + GetListingsResponse::getListings, + response -> { + String token = response.getNextPageToken(); + if (token == null) { + return null; + } + return request.setPageToken(token); + }); + } + + public UpdateListingResponse update(String id, Listing listing) { + return update(new UpdateListingRequest().setId(id).setListing(listing)); + } + + /** + * Update listing. + * + *

Update a listing + */ + public UpdateListingResponse update(UpdateListingRequest request) { + return impl.update(request); + } + + public ProviderListingsService impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderListingsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderListingsImpl.java new file mode 100755 index 000000000..ba119793a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderListingsImpl.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import java.util.HashMap; +import java.util.Map; + +/** Package-local implementation of ProviderListings */ +@Generated +class ProviderListingsImpl implements ProviderListingsService { + private final ApiClient apiClient; + + public ProviderListingsImpl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public CreateListingResponse create(CreateListingRequest request) { + String path = "/api/2.0/marketplace-provider/listing"; + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + headers.put("Content-Type", "application/json"); + return apiClient.POST(path, request, CreateListingResponse.class, headers); + } + + @Override + public void delete(DeleteListingRequest request) { + String path = String.format("/api/2.0/marketplace-provider/listings/%s", request.getId()); + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + apiClient.DELETE(path, request, DeleteListingResponse.class, headers); + } + + @Override + public GetListingResponse get(GetListingRequest request) { + String path = String.format("/api/2.0/marketplace-provider/listings/%s", request.getId()); + Map headers = new HashMap<>(); + 
headers.put("Accept", "application/json"); + return apiClient.GET(path, request, GetListingResponse.class, headers); + } + + @Override + public GetListingsResponse list(GetListingsRequest request) { + String path = "/api/2.0/marketplace-provider/listings"; + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + return apiClient.GET(path, request, GetListingsResponse.class, headers); + } + + @Override + public UpdateListingResponse update(UpdateListingRequest request) { + String path = String.format("/api/2.0/marketplace-provider/listings/%s", request.getId()); + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + headers.put("Content-Type", "application/json"); + return apiClient.PUT(path, request, UpdateListingResponse.class, headers); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderListingsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderListingsService.java new file mode 100755 index 000000000..20d8792aa --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderListingsService.java @@ -0,0 +1,50 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; + +/** + * Listings are the core entities in the Marketplace. They represent the products that are available + * for consumption. + * + *

This is the high-level interface, that contains generated methods. + * + *

Evolving: this interface is under development. Method signatures may change. + */ +@Generated +public interface ProviderListingsService { + /** + * Create a listing. + * + *

Create a new listing + */ + CreateListingResponse create(CreateListingRequest createListingRequest); + + /** + * Delete a listing. + * + *

Delete a listing + */ + void delete(DeleteListingRequest deleteListingRequest); + + /** + * Get a listing. + * + *

Get a listing + */ + GetListingResponse get(GetListingRequest getListingRequest); + + /** + * List listings. + * + *

List listings owned by this provider + */ + GetListingsResponse list(GetListingsRequest getListingsRequest); + + /** + * Update listing. + * + *

Update a listing + */ + UpdateListingResponse update(UpdateListingRequest updateListingRequest); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderPersonalizationRequestsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderPersonalizationRequestsAPI.java new file mode 100755 index 000000000..d4553b717 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderPersonalizationRequestsAPI.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Personalization requests are an alternate to instantly available listings. Control the lifecycle + * of personalized solutions. + */ +@Generated +public class ProviderPersonalizationRequestsAPI { + private static final Logger LOG = + LoggerFactory.getLogger(ProviderPersonalizationRequestsAPI.class); + + private final ProviderPersonalizationRequestsService impl; + + /** Regular-use constructor */ + public ProviderPersonalizationRequestsAPI(ApiClient apiClient) { + impl = new ProviderPersonalizationRequestsImpl(apiClient); + } + + /** Constructor for mocks */ + public ProviderPersonalizationRequestsAPI(ProviderPersonalizationRequestsService mock) { + impl = mock; + } + + /** + * All personalization requests across all listings. + * + *

List personalization requests to this provider. This will return all personalization + * requests, regardless of which listing they are for. + */ + public Iterable list(ListAllPersonalizationRequestsRequest request) { + return new Paginator<>( + request, + impl::list, + ListAllPersonalizationRequestsResponse::getPersonalizationRequests, + response -> { + String token = response.getNextPageToken(); + if (token == null) { + return null; + } + return request.setPageToken(token); + }); + } + + public UpdatePersonalizationRequestResponse update( + String listingId, String requestId, PersonalizationRequestStatus status) { + return update( + new UpdatePersonalizationRequestRequest() + .setListingId(listingId) + .setRequestId(requestId) + .setStatus(status)); + } + + /** + * Update personalization request status. + * + *

Update personalization request. This method only permits updating the status of the request. + */ + public UpdatePersonalizationRequestResponse update(UpdatePersonalizationRequestRequest request) { + return impl.update(request); + } + + public ProviderPersonalizationRequestsService impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderPersonalizationRequestsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderPersonalizationRequestsImpl.java new file mode 100755 index 000000000..ba791c43d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderPersonalizationRequestsImpl.java @@ -0,0 +1,38 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import java.util.HashMap; +import java.util.Map; + +/** Package-local implementation of ProviderPersonalizationRequests */ +@Generated +class ProviderPersonalizationRequestsImpl implements ProviderPersonalizationRequestsService { + private final ApiClient apiClient; + + public ProviderPersonalizationRequestsImpl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public ListAllPersonalizationRequestsResponse list( + ListAllPersonalizationRequestsRequest request) { + String path = "/api/2.0/marketplace-provider/personalization-requests"; + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + return apiClient.GET(path, request, ListAllPersonalizationRequestsResponse.class, headers); + } + + @Override + public UpdatePersonalizationRequestResponse update(UpdatePersonalizationRequestRequest request) { + String path = + String.format( + "/api/2.0/marketplace-provider/listings/%s/personalization-requests/%s/request-status", + request.getListingId(), 
request.getRequestId()); + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + headers.put("Content-Type", "application/json"); + return apiClient.PUT(path, request, UpdatePersonalizationRequestResponse.class, headers); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderPersonalizationRequestsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderPersonalizationRequestsService.java new file mode 100755 index 000000000..c6fdc7a44 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderPersonalizationRequestsService.java @@ -0,0 +1,32 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; + +/** + * Personalization requests are an alternate to instantly available listings. Control the lifecycle + * of personalized solutions. + * + *

This is the high-level interface, that contains generated methods. + * + *

Evolving: this interface is under development. Method signatures may change. + */ +@Generated +public interface ProviderPersonalizationRequestsService { + /** + * All personalization requests across all listings. + * + *

List personalization requests to this provider. This will return all personalization + * requests, regardless of which listing they are for. + */ + ListAllPersonalizationRequestsResponse list( + ListAllPersonalizationRequestsRequest listAllPersonalizationRequestsRequest); + + /** + * Update personalization request status. + * + *

Update personalization request. This method only permits updating the status of the request. + */ + UpdatePersonalizationRequestResponse update( + UpdatePersonalizationRequestRequest updatePersonalizationRequestRequest); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderProviderAnalyticsDashboardsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderProviderAnalyticsDashboardsAPI.java new file mode 100755 index 000000000..52a5c05b9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderProviderAnalyticsDashboardsAPI.java @@ -0,0 +1,72 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** Manage templated analytics solution for providers. */ +@Generated +public class ProviderProviderAnalyticsDashboardsAPI { + private static final Logger LOG = + LoggerFactory.getLogger(ProviderProviderAnalyticsDashboardsAPI.class); + + private final ProviderProviderAnalyticsDashboardsService impl; + + /** Regular-use constructor */ + public ProviderProviderAnalyticsDashboardsAPI(ApiClient apiClient) { + impl = new ProviderProviderAnalyticsDashboardsImpl(apiClient); + } + + /** Constructor for mocks */ + public ProviderProviderAnalyticsDashboardsAPI(ProviderProviderAnalyticsDashboardsService mock) { + impl = mock; + } + + /** + * Create provider analytics dashboard. + * + *

Create provider analytics dashboard. Returns Marketplace specific `id`. Not to be confused + * with the Lakeview dashboard id. + */ + public ProviderAnalyticsDashboard create() { + return impl.create(); + } + + /** + * Get provider analytics dashboard. + * + *

Get provider analytics dashboard. + */ + public ListProviderAnalyticsDashboardResponse get() { + return impl.get(); + } + + /** + * Get latest version of provider analytics dashboard. + * + *

Get latest version of provider analytics dashboard. + */ + public GetLatestVersionProviderAnalyticsDashboardResponse getLatestVersion() { + return impl.getLatestVersion(); + } + + public UpdateProviderAnalyticsDashboardResponse update(String id) { + return update(new UpdateProviderAnalyticsDashboardRequest().setId(id)); + } + + /** + * Update provider analytics dashboard. + * + *

Update provider analytics dashboard. + */ + public UpdateProviderAnalyticsDashboardResponse update( + UpdateProviderAnalyticsDashboardRequest request) { + return impl.update(request); + } + + public ProviderProviderAnalyticsDashboardsService impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderProviderAnalyticsDashboardsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderProviderAnalyticsDashboardsImpl.java new file mode 100755 index 000000000..c0f1473b7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderProviderAnalyticsDashboardsImpl.java @@ -0,0 +1,53 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import java.util.HashMap; +import java.util.Map; + +/** Package-local implementation of ProviderProviderAnalyticsDashboards */ +@Generated +class ProviderProviderAnalyticsDashboardsImpl + implements ProviderProviderAnalyticsDashboardsService { + private final ApiClient apiClient; + + public ProviderProviderAnalyticsDashboardsImpl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public ProviderAnalyticsDashboard create() { + String path = "/api/2.0/marketplace-provider/analytics_dashboard"; + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + return apiClient.POST(path, ProviderAnalyticsDashboard.class, headers); + } + + @Override + public ListProviderAnalyticsDashboardResponse get() { + String path = "/api/2.0/marketplace-provider/analytics_dashboard"; + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + return apiClient.GET(path, ListProviderAnalyticsDashboardResponse.class, headers); + } + + @Override + public 
GetLatestVersionProviderAnalyticsDashboardResponse getLatestVersion() { + String path = "/api/2.0/marketplace-provider/analytics_dashboard/latest"; + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + return apiClient.GET(path, GetLatestVersionProviderAnalyticsDashboardResponse.class, headers); + } + + @Override + public UpdateProviderAnalyticsDashboardResponse update( + UpdateProviderAnalyticsDashboardRequest request) { + String path = + String.format("/api/2.0/marketplace-provider/analytics_dashboard/%s", request.getId()); + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + headers.put("Content-Type", "application/json"); + return apiClient.PUT(path, request, UpdateProviderAnalyticsDashboardResponse.class, headers); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderProviderAnalyticsDashboardsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderProviderAnalyticsDashboardsService.java new file mode 100755 index 000000000..5b1d0a9ef --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderProviderAnalyticsDashboardsService.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; + +/** + * Manage templated analytics solution for providers. + * + *

This is the high-level interface, that contains generated methods. + * + *

Evolving: this interface is under development. Method signatures may change. + */ +@Generated +public interface ProviderProviderAnalyticsDashboardsService { + /** + * Create provider analytics dashboard. + * + *

Create provider analytics dashboard. Returns Marketplace specific `id`. Not to be confused + * with the Lakeview dashboard id. + */ + ProviderAnalyticsDashboard create(); + + /** + * Get provider analytics dashboard. + * + *

Get provider analytics dashboard. + */ + ListProviderAnalyticsDashboardResponse get(); + + /** + * Get latest version of provider analytics dashboard. + * + *

Get latest version of provider analytics dashboard. + */ + GetLatestVersionProviderAnalyticsDashboardResponse getLatestVersion(); + + /** + * Update provider analytics dashboard. + * + *

Update provider analytics dashboard. + */ + UpdateProviderAnalyticsDashboardResponse update( + UpdateProviderAnalyticsDashboardRequest updateProviderAnalyticsDashboardRequest); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderProvidersAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderProvidersAPI.java new file mode 100755 index 000000000..a76c6c3b9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderProvidersAPI.java @@ -0,0 +1,101 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** Providers are entities that manage assets in Marketplace. */ +@Generated +public class ProviderProvidersAPI { + private static final Logger LOG = LoggerFactory.getLogger(ProviderProvidersAPI.class); + + private final ProviderProvidersService impl; + + /** Regular-use constructor */ + public ProviderProvidersAPI(ApiClient apiClient) { + impl = new ProviderProvidersImpl(apiClient); + } + + /** Constructor for mocks */ + public ProviderProvidersAPI(ProviderProvidersService mock) { + impl = mock; + } + + public CreateProviderResponse create(ProviderInfo provider) { + return create(new CreateProviderRequest().setProvider(provider)); + } + + /** + * Create a provider. + * + *

Create a provider + */ + public CreateProviderResponse create(CreateProviderRequest request) { + return impl.create(request); + } + + public void delete(String id) { + delete(new DeleteProviderRequest().setId(id)); + } + + /** + * Delete provider. + * + *

Delete provider + */ + public void delete(DeleteProviderRequest request) { + impl.delete(request); + } + + public GetProviderResponse get(String id) { + return get(new GetProviderRequest().setId(id)); + } + + /** + * Get provider. + * + *

Get provider profile + */ + public GetProviderResponse get(GetProviderRequest request) { + return impl.get(request); + } + + /** + * List providers. + * + *

List provider profiles for account. + */ + public Iterable list(ListProvidersRequest request) { + return new Paginator<>( + request, + impl::list, + ListProvidersResponse::getProviders, + response -> { + String token = response.getNextPageToken(); + if (token == null) { + return null; + } + return request.setPageToken(token); + }); + } + + public UpdateProviderResponse update(String id, ProviderInfo provider) { + return update(new UpdateProviderRequest().setId(id).setProvider(provider)); + } + + /** + * Update provider. + * + *

Update provider profile + */ + public UpdateProviderResponse update(UpdateProviderRequest request) { + return impl.update(request); + } + + public ProviderProvidersService impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderProvidersImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderProvidersImpl.java new file mode 100755 index 000000000..d25b538bc --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderProvidersImpl.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import java.util.HashMap; +import java.util.Map; + +/** Package-local implementation of ProviderProviders */ +@Generated +class ProviderProvidersImpl implements ProviderProvidersService { + private final ApiClient apiClient; + + public ProviderProvidersImpl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public CreateProviderResponse create(CreateProviderRequest request) { + String path = "/api/2.0/marketplace-provider/provider"; + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + headers.put("Content-Type", "application/json"); + return apiClient.POST(path, request, CreateProviderResponse.class, headers); + } + + @Override + public void delete(DeleteProviderRequest request) { + String path = String.format("/api/2.0/marketplace-provider/providers/%s", request.getId()); + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + apiClient.DELETE(path, request, DeleteProviderResponse.class, headers); + } + + @Override + public GetProviderResponse get(GetProviderRequest request) { + String path = String.format("/api/2.0/marketplace-provider/providers/%s", request.getId()); + Map headers = 
new HashMap<>(); + headers.put("Accept", "application/json"); + return apiClient.GET(path, request, GetProviderResponse.class, headers); + } + + @Override + public ListProvidersResponse list(ListProvidersRequest request) { + String path = "/api/2.0/marketplace-provider/providers"; + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + return apiClient.GET(path, request, ListProvidersResponse.class, headers); + } + + @Override + public UpdateProviderResponse update(UpdateProviderRequest request) { + String path = String.format("/api/2.0/marketplace-provider/providers/%s", request.getId()); + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + headers.put("Content-Type", "application/json"); + return apiClient.PUT(path, request, UpdateProviderResponse.class, headers); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderProvidersService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderProvidersService.java new file mode 100755 index 000000000..584ca51cf --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ProviderProvidersService.java @@ -0,0 +1,49 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; + +/** + * Providers are entities that manage assets in Marketplace. + * + *

This is the high-level interface, that contains generated methods. + * + *

Evolving: this interface is under development. Method signatures may change. + */ +@Generated +public interface ProviderProvidersService { + /** + * Create a provider. + * + *

Create a provider + */ + CreateProviderResponse create(CreateProviderRequest createProviderRequest); + + /** + * Delete provider. + * + *

Delete provider + */ + void delete(DeleteProviderRequest deleteProviderRequest); + + /** + * Get provider. + * + *

Get provider profile + */ + GetProviderResponse get(GetProviderRequest getProviderRequest); + + /** + * List providers. + * + *

List provider profiles for account. + */ + ListProvidersResponse list(ListProvidersRequest listProvidersRequest); + + /** + * Update provider. + * + *

Update provider profile + */ + UpdateProviderResponse update(UpdateProviderRequest updateProviderRequest); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RegionInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RegionInfo.java new file mode 100755 index 000000000..4e7cecaca --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RegionInfo.java @@ -0,0 +1,55 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class RegionInfo { + /** */ + @JsonProperty("cloud") + private String cloud; + + /** */ + @JsonProperty("region") + private String region; + + public RegionInfo setCloud(String cloud) { + this.cloud = cloud; + return this; + } + + public String getCloud() { + return cloud; + } + + public RegionInfo setRegion(String region) { + this.region = region; + return this; + } + + public String getRegion() { + return region; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RegionInfo that = (RegionInfo) o; + return Objects.equals(cloud, that.cloud) && Objects.equals(region, that.region); + } + + @Override + public int hashCode() { + return Objects.hash(cloud, region); + } + + @Override + public String toString() { + return new ToStringer(RegionInfo.class).add("cloud", cloud).add("region", region).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RemoveExchangeForListingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RemoveExchangeForListingRequest.java new file mode 100755 index 
000000000..89fd8a178 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RemoveExchangeForListingRequest.java @@ -0,0 +1,41 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +/** Remove an exchange for listing */ +@Generated +public class RemoveExchangeForListingRequest { + /** */ + private String id; + + public RemoveExchangeForListingRequest setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RemoveExchangeForListingRequest that = (RemoveExchangeForListingRequest) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return new ToStringer(RemoveExchangeForListingRequest.class).add("id", id).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RemoveExchangeForListingResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RemoveExchangeForListingResponse.java new file mode 100755 index 000000000..02a6e7364 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RemoveExchangeForListingResponse.java @@ -0,0 +1,28 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import java.util.Objects; + +@Generated +public class RemoveExchangeForListingResponse { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + return true; + } + + @Override + public int hashCode() { + return Objects.hash(); + } + + @Override + public String toString() { + return new ToStringer(RemoveExchangeForListingResponse.class).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RepoInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RepoInfo.java new file mode 100755 index 000000000..988491334 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RepoInfo.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class RepoInfo { + /** the git repo url e.g. 
https://github.com/databrickslabs/dolly.git */ + @JsonProperty("git_repo_url") + private String gitRepoUrl; + + public RepoInfo setGitRepoUrl(String gitRepoUrl) { + this.gitRepoUrl = gitRepoUrl; + return this; + } + + public String getGitRepoUrl() { + return gitRepoUrl; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RepoInfo that = (RepoInfo) o; + return Objects.equals(gitRepoUrl, that.gitRepoUrl); + } + + @Override + public int hashCode() { + return Objects.hash(gitRepoUrl); + } + + @Override + public String toString() { + return new ToStringer(RepoInfo.class).add("gitRepoUrl", gitRepoUrl).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RepoInstallation.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RepoInstallation.java new file mode 100755 index 000000000..33a4ad683 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/RepoInstallation.java @@ -0,0 +1,61 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class RepoInstallation { + /** the user-specified repo name for their installed git repo listing */ + @JsonProperty("repo_name") + private String repoName; + + /** + * refers to the full url file path that navigates the user to the repo's entrypoint (e.g. 
a + * README.md file, or the repo file view in the unified UI) should just be a relative path + */ + @JsonProperty("repo_path") + private String repoPath; + + public RepoInstallation setRepoName(String repoName) { + this.repoName = repoName; + return this; + } + + public String getRepoName() { + return repoName; + } + + public RepoInstallation setRepoPath(String repoPath) { + this.repoPath = repoPath; + return this; + } + + public String getRepoPath() { + return repoPath; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RepoInstallation that = (RepoInstallation) o; + return Objects.equals(repoName, that.repoName) && Objects.equals(repoPath, that.repoPath); + } + + @Override + public int hashCode() { + return Objects.hash(repoName, repoPath); + } + + @Override + public String toString() { + return new ToStringer(RepoInstallation.class) + .add("repoName", repoName) + .add("repoPath", repoPath) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/SearchListingsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/SearchListingsRequest.java new file mode 100755 index 000000000..1ee056b2e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/SearchListingsRequest.java @@ -0,0 +1,175 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import java.util.Collection; +import java.util.Objects; + +/** Search listings */ +@Generated +public class SearchListingsRequest { + /** Matches any of the following asset types */ + @QueryParam("assets") + private Collection assets; + + /** Matches any of the following categories */ + @QueryParam("categories") + private Collection categories; + + /** */ + @QueryParam("is_free") + private Boolean isFree; + + /** */ + @QueryParam("is_private_exchange") + private Boolean isPrivateExchange; + + /** */ + @QueryParam("page_size") + private Long pageSize; + + /** */ + @QueryParam("page_token") + private String pageToken; + + /** Matches any of the following provider ids */ + @QueryParam("provider_ids") + private Collection providerIds; + + /** Fuzzy matches query */ + @QueryParam("query") + private String query; + + /** */ + @QueryParam("sort_by") + private SortBy sortBy; + + public SearchListingsRequest setAssets(Collection assets) { + this.assets = assets; + return this; + } + + public Collection getAssets() { + return assets; + } + + public SearchListingsRequest setCategories(Collection categories) { + this.categories = categories; + return this; + } + + public Collection getCategories() { + return categories; + } + + public SearchListingsRequest setIsFree(Boolean isFree) { + this.isFree = isFree; + return this; + } + + public Boolean getIsFree() { + return isFree; + } + + public SearchListingsRequest setIsPrivateExchange(Boolean isPrivateExchange) { + this.isPrivateExchange = isPrivateExchange; + return this; + } + + public Boolean getIsPrivateExchange() { + return isPrivateExchange; + } + + public SearchListingsRequest setPageSize(Long pageSize) { + this.pageSize = pageSize; + return this; + } + + public Long getPageSize() { + return pageSize; + } + + public 
SearchListingsRequest setPageToken(String pageToken) { + this.pageToken = pageToken; + return this; + } + + public String getPageToken() { + return pageToken; + } + + public SearchListingsRequest setProviderIds(Collection providerIds) { + this.providerIds = providerIds; + return this; + } + + public Collection getProviderIds() { + return providerIds; + } + + public SearchListingsRequest setQuery(String query) { + this.query = query; + return this; + } + + public String getQuery() { + return query; + } + + public SearchListingsRequest setSortBy(SortBy sortBy) { + this.sortBy = sortBy; + return this; + } + + public SortBy getSortBy() { + return sortBy; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SearchListingsRequest that = (SearchListingsRequest) o; + return Objects.equals(assets, that.assets) + && Objects.equals(categories, that.categories) + && Objects.equals(isFree, that.isFree) + && Objects.equals(isPrivateExchange, that.isPrivateExchange) + && Objects.equals(pageSize, that.pageSize) + && Objects.equals(pageToken, that.pageToken) + && Objects.equals(providerIds, that.providerIds) + && Objects.equals(query, that.query) + && Objects.equals(sortBy, that.sortBy); + } + + @Override + public int hashCode() { + return Objects.hash( + assets, + categories, + isFree, + isPrivateExchange, + pageSize, + pageToken, + providerIds, + query, + sortBy); + } + + @Override + public String toString() { + return new ToStringer(SearchListingsRequest.class) + .add("assets", assets) + .add("categories", categories) + .add("isFree", isFree) + .add("isPrivateExchange", isPrivateExchange) + .add("pageSize", pageSize) + .add("pageToken", pageToken) + .add("providerIds", providerIds) + .add("query", query) + .add("sortBy", sortBy) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/SearchListingsResponse.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/SearchListingsResponse.java new file mode 100755 index 000000000..d0c714e34 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/SearchListingsResponse.java @@ -0,0 +1,60 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +@Generated +public class SearchListingsResponse { + /** */ + @JsonProperty("listings") + private Collection

listings; + + /** */ + @JsonProperty("next_page_token") + private String nextPageToken; + + public SearchListingsResponse setListings(Collection listings) { + this.listings = listings; + return this; + } + + public Collection getListings() { + return listings; + } + + public SearchListingsResponse setNextPageToken(String nextPageToken) { + this.nextPageToken = nextPageToken; + return this; + } + + public String getNextPageToken() { + return nextPageToken; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SearchListingsResponse that = (SearchListingsResponse) o; + return Objects.equals(listings, that.listings) + && Objects.equals(nextPageToken, that.nextPageToken); + } + + @Override + public int hashCode() { + return Objects.hash(listings, nextPageToken); + } + + @Override + public String toString() { + return new ToStringer(SearchListingsResponse.class) + .add("listings", listings) + .add("nextPageToken", nextPageToken) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ShareInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ShareInfo.java new file mode 100755 index 000000000..5b656447a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/ShareInfo.java @@ -0,0 +1,55 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class ShareInfo { + /** */ + @JsonProperty("name") + private String name; + + /** */ + @JsonProperty("type") + private ListingShareType typeValue; + + public ShareInfo setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + public ShareInfo setType(ListingShareType typeValue) { + this.typeValue = typeValue; + return this; + } + + public ListingShareType getType() { + return typeValue; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ShareInfo that = (ShareInfo) o; + return Objects.equals(name, that.name) && Objects.equals(typeValue, that.typeValue); + } + + @Override + public int hashCode() { + return Objects.hash(name, typeValue); + } + + @Override + public String toString() { + return new ToStringer(ShareInfo.class).add("name", name).add("typeValue", typeValue).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/SharedDataObject.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/SharedDataObject.java new file mode 100755 index 000000000..6cedfa277 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/SharedDataObject.java @@ -0,0 +1,58 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class SharedDataObject { + /** The type of the data object. 
Could be one of: TABLE, SCHEMA, NOTEBOOK_FILE, MODEL, VOLUME */ + @JsonProperty("data_object_type") + private String dataObjectType; + + /** Name of the shared object */ + @JsonProperty("name") + private String name; + + public SharedDataObject setDataObjectType(String dataObjectType) { + this.dataObjectType = dataObjectType; + return this; + } + + public String getDataObjectType() { + return dataObjectType; + } + + public SharedDataObject setName(String name) { + this.name = name; + return this; + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SharedDataObject that = (SharedDataObject) o; + return Objects.equals(dataObjectType, that.dataObjectType) && Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(dataObjectType, name); + } + + @Override + public String toString() { + return new ToStringer(SharedDataObject.class) + .add("dataObjectType", dataObjectType) + .add("name", name) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/SortBy.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/SortBy.java new file mode 100755 index 000000000..19f0c0e63 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/SortBy.java @@ -0,0 +1,13 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum SortBy { + SORT_BY_DATE, + SORT_BY_RELEVANCE, + SORT_BY_TITLE, + SORT_BY_UNSPECIFIED, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/SortBySpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/SortBySpec.java new file mode 100755 index 000000000..84f0fb28a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/SortBySpec.java @@ -0,0 +1,61 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.QueryParam; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class SortBySpec { + /** The field on which to sort the listing. */ + @JsonProperty("sort_by") + @QueryParam("sort_by") + private SortBy sortBy; + + /** The order in which to sort the listing. 
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.marketplace;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.QueryParam;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

/**
 * Generated sort specification: which field to sort on and in which direction. Both fields double
 * as query parameters (see {@code @QueryParam}) and JSON body properties.
 */
@Generated
public class SortBySpec {
  /** The field on which to sort the listing. */
  @JsonProperty("sort_by")
  @QueryParam("sort_by")
  private SortBy sortBy;

  /** The order in which to sort the listing. */
  @JsonProperty("sort_order")
  @QueryParam("sort_order")
  private SortOrder sortOrder;

  public SortBy getSortBy() {
    return sortBy;
  }

  public SortBySpec setSortBy(SortBy sortBy) {
    this.sortBy = sortBy;
    return this;
  }

  public SortOrder getSortOrder() {
    return sortOrder;
  }

  public SortBySpec setSortOrder(SortOrder sortOrder) {
    this.sortOrder = sortOrder;
    return this;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    SortBySpec other = (SortBySpec) o;
    return Objects.equals(sortBy, other.sortBy) && Objects.equals(sortOrder, other.sortOrder);
  }

  @Override
  public int hashCode() {
    return Objects.hash(sortBy, sortOrder);
  }

  @Override
  public String toString() {
    return new ToStringer(SortBySpec.class)
        .add("sortBy", sortBy)
        .add("sortOrder", sortOrder)
        .toString();
  }
}
+ +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum SortOrder { + SORT_ORDER_ASCENDING, + SORT_ORDER_DESCENDING, + SORT_ORDER_UNSPECIFIED, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/TokenDetail.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/TokenDetail.java new file mode 100755 index 000000000..646ef5e65 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/TokenDetail.java @@ -0,0 +1,92 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class TokenDetail { + /** */ + @JsonProperty("bearerToken") + private String bearerToken; + + /** */ + @JsonProperty("endpoint") + private String endpoint; + + /** */ + @JsonProperty("expirationTime") + private String expirationTime; + + /** + * These field names must follow the delta sharing protocol. 
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.marketplace;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

/**
 * Generated model for delta-sharing token details. Note the JSON property names are camelCase
 * (not snake_case) — they must follow the delta sharing protocol wire format.
 */
@Generated
public class TokenDetail {
  @JsonProperty("bearerToken")
  private String bearerToken;

  @JsonProperty("endpoint")
  private String endpoint;

  @JsonProperty("expirationTime")
  private String expirationTime;

  /**
   * These field names must follow the delta sharing protocol. Original message:
   * RetrieveToken.Response in managed-catalog/api/messages/recipient.proto
   */
  @JsonProperty("shareCredentialsVersion")
  private Long shareCredentialsVersion;

  public String getBearerToken() {
    return bearerToken;
  }

  public TokenDetail setBearerToken(String bearerToken) {
    this.bearerToken = bearerToken;
    return this;
  }

  public String getEndpoint() {
    return endpoint;
  }

  public TokenDetail setEndpoint(String endpoint) {
    this.endpoint = endpoint;
    return this;
  }

  public String getExpirationTime() {
    return expirationTime;
  }

  public TokenDetail setExpirationTime(String expirationTime) {
    this.expirationTime = expirationTime;
    return this;
  }

  public Long getShareCredentialsVersion() {
    return shareCredentialsVersion;
  }

  public TokenDetail setShareCredentialsVersion(Long shareCredentialsVersion) {
    this.shareCredentialsVersion = shareCredentialsVersion;
    return this;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    TokenDetail other = (TokenDetail) o;
    return Objects.equals(bearerToken, other.bearerToken)
        && Objects.equals(endpoint, other.endpoint)
        && Objects.equals(expirationTime, other.expirationTime)
        && Objects.equals(shareCredentialsVersion, other.shareCredentialsVersion);
  }

  @Override
  public int hashCode() {
    return Objects.hash(bearerToken, endpoint, expirationTime, shareCredentialsVersion);
  }

  @Override
  public String toString() {
    return new ToStringer(TokenDetail.class)
        .add("bearerToken", bearerToken)
        .add("endpoint", endpoint)
        .add("expirationTime", expirationTime)
        .add("shareCredentialsVersion", shareCredentialsVersion)
        .toString();
  }
}
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.marketplace;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

/** Generated model describing a Recipient Token and its audit metadata. */
@Generated
public class TokenInfo {
  /**
   * Full activation url to retrieve the access token. It will be empty if the token is already
   * retrieved.
   */
  @JsonProperty("activation_url")
  private String activationUrl;

  /** Time at which this Recipient Token was created, in epoch milliseconds. */
  @JsonProperty("created_at")
  private Long createdAt;

  /** Username of Recipient Token creator. */
  @JsonProperty("created_by")
  private String createdBy;

  /** Expiration timestamp of the token in epoch milliseconds. */
  @JsonProperty("expiration_time")
  private Long expirationTime;

  /** Unique id of the Recipient Token. */
  @JsonProperty("id")
  private String id;

  /** Time at which this Recipient Token was updated, in epoch milliseconds. */
  @JsonProperty("updated_at")
  private Long updatedAt;

  /** Username of Recipient Token updater. */
  @JsonProperty("updated_by")
  private String updatedBy;

  public String getActivationUrl() {
    return activationUrl;
  }

  public TokenInfo setActivationUrl(String activationUrl) {
    this.activationUrl = activationUrl;
    return this;
  }

  public Long getCreatedAt() {
    return createdAt;
  }

  public TokenInfo setCreatedAt(Long createdAt) {
    this.createdAt = createdAt;
    return this;
  }

  public String getCreatedBy() {
    return createdBy;
  }

  public TokenInfo setCreatedBy(String createdBy) {
    this.createdBy = createdBy;
    return this;
  }

  public Long getExpirationTime() {
    return expirationTime;
  }

  public TokenInfo setExpirationTime(Long expirationTime) {
    this.expirationTime = expirationTime;
    return this;
  }

  public String getId() {
    return id;
  }

  public TokenInfo setId(String id) {
    this.id = id;
    return this;
  }

  public Long getUpdatedAt() {
    return updatedAt;
  }

  public TokenInfo setUpdatedAt(Long updatedAt) {
    this.updatedAt = updatedAt;
    return this;
  }

  public String getUpdatedBy() {
    return updatedBy;
  }

  public TokenInfo setUpdatedBy(String updatedBy) {
    this.updatedBy = updatedBy;
    return this;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    TokenInfo other = (TokenInfo) o;
    return Objects.equals(activationUrl, other.activationUrl)
        && Objects.equals(createdAt, other.createdAt)
        && Objects.equals(createdBy, other.createdBy)
        && Objects.equals(expirationTime, other.expirationTime)
        && Objects.equals(id, other.id)
        && Objects.equals(updatedAt, other.updatedAt)
        && Objects.equals(updatedBy, other.updatedBy);
  }

  @Override
  public int hashCode() {
    return Objects.hash(
        activationUrl, createdAt, createdBy, expirationTime, id, updatedAt, updatedBy);
  }

  @Override
  public String toString() {
    return new ToStringer(TokenInfo.class)
        .add("activationUrl", activationUrl)
        .add("createdAt", createdAt)
        .add("createdBy", createdBy)
        .add("expirationTime", expirationTime)
        .add("id", id)
        .add("updatedAt", updatedAt)
        .add("updatedBy", updatedBy)
        .toString();
  }
}
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.marketplace;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

/** Generated request wrapper carrying an {@link ExchangeFilter} and its id. */
@Generated
public class UpdateExchangeFilterRequest {
  @JsonProperty("filter")
  private ExchangeFilter filter;

  // No @JsonProperty: not serialized into the JSON body; presumably a URL path
  // parameter — confirm against the generated Impl class.
  private String id;

  public ExchangeFilter getFilter() {
    return filter;
  }

  public UpdateExchangeFilterRequest setFilter(ExchangeFilter filter) {
    this.filter = filter;
    return this;
  }

  public String getId() {
    return id;
  }

  public UpdateExchangeFilterRequest setId(String id) {
    this.id = id;
    return this;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    UpdateExchangeFilterRequest other = (UpdateExchangeFilterRequest) o;
    return Objects.equals(filter, other.filter) && Objects.equals(id, other.id);
  }

  @Override
  public int hashCode() {
    return Objects.hash(filter, id);
  }

  @Override
  public String toString() {
    return new ToStringer(UpdateExchangeFilterRequest.class)
        .add("filter", filter)
        .add("id", id)
        .toString();
  }
}
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.marketplace;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

/** Generated response wrapper around the updated {@link ExchangeFilter}. */
@Generated
public class UpdateExchangeFilterResponse {
  @JsonProperty("filter")
  private ExchangeFilter filter;

  public ExchangeFilter getFilter() {
    return filter;
  }

  public UpdateExchangeFilterResponse setFilter(ExchangeFilter filter) {
    this.filter = filter;
    return this;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    UpdateExchangeFilterResponse other = (UpdateExchangeFilterResponse) o;
    return Objects.equals(filter, other.filter);
  }

  @Override
  public int hashCode() {
    return Objects.hash(filter);
  }

  @Override
  public String toString() {
    return new ToStringer(UpdateExchangeFilterResponse.class).add("filter", filter).toString();
  }
}
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.marketplace;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

/** Generated request wrapper carrying an {@link Exchange} and its id. */
@Generated
public class UpdateExchangeRequest {
  @JsonProperty("exchange")
  private Exchange exchange;

  // No @JsonProperty: not serialized into the JSON body; presumably a URL path
  // parameter — confirm against the generated Impl class.
  private String id;

  public Exchange getExchange() {
    return exchange;
  }

  public UpdateExchangeRequest setExchange(Exchange exchange) {
    this.exchange = exchange;
    return this;
  }

  public String getId() {
    return id;
  }

  public UpdateExchangeRequest setId(String id) {
    this.id = id;
    return this;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    UpdateExchangeRequest other = (UpdateExchangeRequest) o;
    return Objects.equals(exchange, other.exchange) && Objects.equals(id, other.id);
  }

  @Override
  public int hashCode() {
    return Objects.hash(exchange, id);
  }

  @Override
  public String toString() {
    return new ToStringer(UpdateExchangeRequest.class)
        .add("exchange", exchange)
        .add("id", id)
        .toString();
  }
}
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.marketplace;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

/** Generated response wrapper around the updated {@link Exchange}. */
@Generated
public class UpdateExchangeResponse {
  @JsonProperty("exchange")
  private Exchange exchange;

  public Exchange getExchange() {
    return exchange;
  }

  public UpdateExchangeResponse setExchange(Exchange exchange) {
    this.exchange = exchange;
    return this;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    UpdateExchangeResponse other = (UpdateExchangeResponse) o;
    return Objects.equals(exchange, other.exchange);
  }

  @Override
  public int hashCode() {
    return Objects.hash(exchange);
  }

  @Override
  public String toString() {
    return new ToStringer(UpdateExchangeResponse.class).add("exchange", exchange).toString();
  }
}
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.marketplace;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

/**
 * Generated request for updating an installation: carries the {@link InstallationDetail} body, a
 * rotate_token flag, and two un-serialized identifiers.
 */
@Generated
public class UpdateInstallationRequest {
  @JsonProperty("installation")
  private InstallationDetail installation;

  // No @JsonProperty: not serialized into the JSON body; presumably URL path
  // parameters — confirm against the generated Impl class.
  private String installationId;

  private String listingId;

  @JsonProperty("rotate_token")
  private Boolean rotateToken;

  public InstallationDetail getInstallation() {
    return installation;
  }

  public UpdateInstallationRequest setInstallation(InstallationDetail installation) {
    this.installation = installation;
    return this;
  }

  public String getInstallationId() {
    return installationId;
  }

  public UpdateInstallationRequest setInstallationId(String installationId) {
    this.installationId = installationId;
    return this;
  }

  public String getListingId() {
    return listingId;
  }

  public UpdateInstallationRequest setListingId(String listingId) {
    this.listingId = listingId;
    return this;
  }

  public Boolean getRotateToken() {
    return rotateToken;
  }

  public UpdateInstallationRequest setRotateToken(Boolean rotateToken) {
    this.rotateToken = rotateToken;
    return this;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    UpdateInstallationRequest other = (UpdateInstallationRequest) o;
    return Objects.equals(installation, other.installation)
        && Objects.equals(installationId, other.installationId)
        && Objects.equals(listingId, other.listingId)
        && Objects.equals(rotateToken, other.rotateToken);
  }

  @Override
  public int hashCode() {
    return Objects.hash(installation, installationId, listingId, rotateToken);
  }

  @Override
  public String toString() {
    return new ToStringer(UpdateInstallationRequest.class)
        .add("installation", installation)
        .add("installationId", installationId)
        .add("listingId", listingId)
        .add("rotateToken", rotateToken)
        .toString();
  }
}
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.marketplace;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

/** Generated response wrapper around the updated {@link InstallationDetail}. */
@Generated
public class UpdateInstallationResponse {
  @JsonProperty("installation")
  private InstallationDetail installation;

  public InstallationDetail getInstallation() {
    return installation;
  }

  public UpdateInstallationResponse setInstallation(InstallationDetail installation) {
    this.installation = installation;
    return this;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    UpdateInstallationResponse other = (UpdateInstallationResponse) o;
    return Objects.equals(installation, other.installation);
  }

  @Override
  public int hashCode() {
    return Objects.hash(installation);
  }

  @Override
  public String toString() {
    return new ToStringer(UpdateInstallationResponse.class)
        .add("installation", installation)
        .toString();
  }
}
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.marketplace;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

/** Generated request wrapper carrying a {@link Listing} and its id. */
@Generated
public class UpdateListingRequest {
  // No @JsonProperty: not serialized into the JSON body; presumably a URL path
  // parameter — confirm against the generated Impl class.
  private String id;

  @JsonProperty("listing")
  private Listing listing;

  public String getId() {
    return id;
  }

  public UpdateListingRequest setId(String id) {
    this.id = id;
    return this;
  }

  public Listing getListing() {
    return listing;
  }

  public UpdateListingRequest setListing(Listing listing) {
    this.listing = listing;
    return this;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    UpdateListingRequest other = (UpdateListingRequest) o;
    return Objects.equals(id, other.id) && Objects.equals(listing, other.listing);
  }

  @Override
  public int hashCode() {
    return Objects.hash(id, listing);
  }

  @Override
  public String toString() {
    return new ToStringer(UpdateListingRequest.class)
        .add("id", id)
        .add("listing", listing)
        .toString();
  }
}
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.marketplace;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

/** Generated response wrapper around the updated {@link Listing}. */
@Generated
public class UpdateListingResponse {
  @JsonProperty("listing")
  private Listing listing;

  public Listing getListing() {
    return listing;
  }

  public UpdateListingResponse setListing(Listing listing) {
    this.listing = listing;
    return this;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    UpdateListingResponse other = (UpdateListingResponse) o;
    return Objects.equals(listing, other.listing);
  }

  @Override
  public int hashCode() {
    return Objects.hash(listing);
  }

  @Override
  public String toString() {
    return new ToStringer(UpdateListingResponse.class).add("listing", listing).toString();
  }
}
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.marketplace;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

/**
 * Generated request for updating a personalization request: JSON body fields (reason, share,
 * status) plus two un-serialized identifiers.
 */
@Generated
public class UpdatePersonalizationRequestRequest {
  // No @JsonProperty on listingId/requestId: not serialized into the JSON body;
  // presumably URL path parameters — confirm against the generated Impl class.
  private String listingId;

  @JsonProperty("reason")
  private String reason;

  private String requestId;

  @JsonProperty("share")
  private ShareInfo share;

  @JsonProperty("status")
  private PersonalizationRequestStatus status;

  public String getListingId() {
    return listingId;
  }

  public UpdatePersonalizationRequestRequest setListingId(String listingId) {
    this.listingId = listingId;
    return this;
  }

  public String getReason() {
    return reason;
  }

  public UpdatePersonalizationRequestRequest setReason(String reason) {
    this.reason = reason;
    return this;
  }

  public String getRequestId() {
    return requestId;
  }

  public UpdatePersonalizationRequestRequest setRequestId(String requestId) {
    this.requestId = requestId;
    return this;
  }

  public ShareInfo getShare() {
    return share;
  }

  public UpdatePersonalizationRequestRequest setShare(ShareInfo share) {
    this.share = share;
    return this;
  }

  public PersonalizationRequestStatus getStatus() {
    return status;
  }

  public UpdatePersonalizationRequestRequest setStatus(PersonalizationRequestStatus status) {
    this.status = status;
    return this;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    UpdatePersonalizationRequestRequest other = (UpdatePersonalizationRequestRequest) o;
    return Objects.equals(listingId, other.listingId)
        && Objects.equals(reason, other.reason)
        && Objects.equals(requestId, other.requestId)
        && Objects.equals(share, other.share)
        && Objects.equals(status, other.status);
  }

  @Override
  public int hashCode() {
    return Objects.hash(listingId, reason, requestId, share, status);
  }

  @Override
  public String toString() {
    return new ToStringer(UpdatePersonalizationRequestRequest.class)
        .add("listingId", listingId)
        .add("reason", reason)
        .add("requestId", requestId)
        .add("share", share)
        .add("status", status)
        .toString();
  }
}
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.marketplace;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

/** Generated response wrapper around the updated {@link PersonalizationRequest}. */
@Generated
public class UpdatePersonalizationRequestResponse {
  @JsonProperty("request")
  private PersonalizationRequest request;

  public PersonalizationRequest getRequest() {
    return request;
  }

  public UpdatePersonalizationRequestResponse setRequest(PersonalizationRequest request) {
    this.request = request;
    return this;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    UpdatePersonalizationRequestResponse other = (UpdatePersonalizationRequestResponse) o;
    return Objects.equals(request, other.request);
  }

  @Override
  public int hashCode() {
    return Objects.hash(request);
  }

  @Override
  public String toString() {
    return new ToStringer(UpdatePersonalizationRequestResponse.class)
        .add("request", request)
        .toString();
  }
}
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.marketplace;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

/** Generated request for updating a provider analytics dashboard to a template version. */
@Generated
public class UpdateProviderAnalyticsDashboardRequest {
  /** id is immutable property and can't be updated. */
  // No @JsonProperty: not serialized into the JSON body; presumably a URL path
  // parameter — confirm against the generated Impl class.
  private String id;

  /**
   * this is the version of the dashboard template we want to update our user to current expectation
   * is that it should be equal to latest version of the dashboard template
   */
  @JsonProperty("version")
  private Long version;

  public String getId() {
    return id;
  }

  public UpdateProviderAnalyticsDashboardRequest setId(String id) {
    this.id = id;
    return this;
  }

  public Long getVersion() {
    return version;
  }

  public UpdateProviderAnalyticsDashboardRequest setVersion(Long version) {
    this.version = version;
    return this;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    UpdateProviderAnalyticsDashboardRequest other = (UpdateProviderAnalyticsDashboardRequest) o;
    return Objects.equals(id, other.id) && Objects.equals(version, other.version);
  }

  @Override
  public int hashCode() {
    return Objects.hash(id, version);
  }

  @Override
  public String toString() {
    return new ToStringer(UpdateProviderAnalyticsDashboardRequest.class)
        .add("id", id)
        .add("version", version)
        .toString();
  }
}
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.marketplace;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

/** Generated response for the analytics dashboard update, echoing id/version. */
@Generated
public class UpdateProviderAnalyticsDashboardResponse {
  /** this is newly created Lakeview dashboard for the user */
  @JsonProperty("dashboard_id")
  private String dashboardId;

  /** id & version should be the same as the request */
  @JsonProperty("id")
  private String id;

  @JsonProperty("version")
  private Long version;

  public String getDashboardId() {
    return dashboardId;
  }

  public UpdateProviderAnalyticsDashboardResponse setDashboardId(String dashboardId) {
    this.dashboardId = dashboardId;
    return this;
  }

  public String getId() {
    return id;
  }

  public UpdateProviderAnalyticsDashboardResponse setId(String id) {
    this.id = id;
    return this;
  }

  public Long getVersion() {
    return version;
  }

  public UpdateProviderAnalyticsDashboardResponse setVersion(Long version) {
    this.version = version;
    return this;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    UpdateProviderAnalyticsDashboardResponse other = (UpdateProviderAnalyticsDashboardResponse) o;
    return Objects.equals(dashboardId, other.dashboardId)
        && Objects.equals(id, other.id)
        && Objects.equals(version, other.version);
  }

  @Override
  public int hashCode() {
    return Objects.hash(dashboardId, id, version);
  }

  @Override
  public String toString() {
    return new ToStringer(UpdateProviderAnalyticsDashboardResponse.class)
        .add("dashboardId", dashboardId)
        .add("id", id)
        .add("version", version)
        .toString();
  }
}
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.

package com.databricks.sdk.service.marketplace;

import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;

/** Generated request wrapper carrying a {@link ProviderInfo} and its id. */
@Generated
public class UpdateProviderRequest {
  // No @JsonProperty: not serialized into the JSON body; presumably a URL path
  // parameter — confirm against the generated Impl class.
  private String id;

  @JsonProperty("provider")
  private ProviderInfo provider;

  public String getId() {
    return id;
  }

  public UpdateProviderRequest setId(String id) {
    this.id = id;
    return this;
  }

  public ProviderInfo getProvider() {
    return provider;
  }

  public UpdateProviderRequest setProvider(ProviderInfo provider) {
    this.provider = provider;
    return this;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    UpdateProviderRequest other = (UpdateProviderRequest) o;
    return Objects.equals(id, other.id) && Objects.equals(provider, other.provider);
  }

  @Override
  public int hashCode() {
    return Objects.hash(id, provider);
  }

  @Override
  public String toString() {
    return new ToStringer(UpdateProviderRequest.class)
        .add("id", id)
        .add("provider", provider)
        .toString();
  }
}
ToStringer(UpdateProviderRequest.class) + .add("id", id) + .add("provider", provider) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateProviderResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateProviderResponse.java new file mode 100755 index 000000000..81323258a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/UpdateProviderResponse.java @@ -0,0 +1,42 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class UpdateProviderResponse { + /** */ + @JsonProperty("provider") + private ProviderInfo provider; + + public UpdateProviderResponse setProvider(ProviderInfo provider) { + this.provider = provider; + return this; + } + + public ProviderInfo getProvider() { + return provider; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UpdateProviderResponse that = (UpdateProviderResponse) o; + return Objects.equals(provider, that.provider); + } + + @Override + public int hashCode() { + return Objects.hash(provider); + } + + @Override + public String toString() { + return new ToStringer(UpdateProviderResponse.class).add("provider", provider).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/Visibility.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/Visibility.java new file mode 100755 index 000000000..abbb62a3f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/Visibility.java @@ -0,0 +1,11 @@ +// Code generated from OpenAPI 
specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; + +@Generated +public enum Visibility { + PRIVATE, + PUBLIC, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/VisibilityFilter.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/VisibilityFilter.java new file mode 100755 index 000000000..00d54de4a --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/marketplace/VisibilityFilter.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.marketplace; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class VisibilityFilter { + /** */ + @JsonProperty("filterType") + private FilterType filterType; + + /** */ + @JsonProperty("filterValue") + private String filterValue; + + public VisibilityFilter setFilterType(FilterType filterType) { + this.filterType = filterType; + return this; + } + + public FilterType getFilterType() { + return filterType; + } + + public VisibilityFilter setFilterValue(String filterValue) { + this.filterValue = filterValue; + return this; + } + + public String getFilterValue() { + return filterValue; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + VisibilityFilter that = (VisibilityFilter) o; + return Objects.equals(filterType, that.filterType) + && Objects.equals(filterValue, that.filterValue); + } + + @Override + public int hashCode() { + return Objects.hash(filterType, filterValue); + } + + @Override + public String toString() { + return new ToStringer(VisibilityFilter.class) + .add("filterType", filterType) + .add("filterValue", 
filterValue) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryAPI.java index dd47f3d91..e034e00dd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ModelRegistryAPI.java @@ -232,7 +232,11 @@ public Iterable getLatestVersions(String name) { *

Gets the latest version of a registered model. */ public Iterable getLatestVersions(GetLatestVersionsRequest request) { - return impl.getLatestVersions(request).getModelVersions(); + return new Paginator<>( + request, + impl::getLatestVersions, + GetLatestVersionsResponse::getModelVersions, + response -> null); } public GetModelResponse getModel(String name) { @@ -341,7 +345,11 @@ public Iterable listTransitionRequests(String name, String version) { *

Gets a list of all open stage transition requests for the model version. */ public Iterable listTransitionRequests(ListTransitionRequestsRequest request) { - return impl.listTransitionRequests(request).getRequests(); + return new Paginator<>( + request, + impl::listTransitionRequests, + ListTransitionRequestsResponse::getRequests, + response -> null); } /** diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CustomAppIntegrationAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CustomAppIntegrationAPI.java index 2b380eac0..b1a56ae8c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CustomAppIntegrationAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/CustomAppIntegrationAPI.java @@ -3,6 +3,7 @@ import com.databricks.sdk.core.ApiClient; import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; import java.util.Collection; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -75,7 +76,8 @@ public GetCustomAppIntegrationOutput get(GetCustomAppIntegrationRequest request) *

Get the list of custom oauth app integrations for the specified Databricks account */ public Iterable list() { - return impl.list().getApps(); + return new Paginator<>( + null, (Void v) -> impl.list(), GetCustomAppIntegrationsOutput::getApps, response -> null); } public void update(String integrationId) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/PublishedAppIntegrationAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/PublishedAppIntegrationAPI.java index 758bbe100..ec65c559c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/PublishedAppIntegrationAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/PublishedAppIntegrationAPI.java @@ -3,6 +3,7 @@ import com.databricks.sdk.core.ApiClient; import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -72,7 +73,11 @@ public GetPublishedAppIntegrationOutput get(GetPublishedAppIntegrationRequest re *

Get the list of published oauth app integrations for the specified Databricks account */ public Iterable list() { - return impl.list().getApps(); + return new Paginator<>( + null, + (Void v) -> impl.list(), + GetPublishedAppIntegrationsOutput::getApps, + response -> null); } public void update(String integrationId) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalSecretsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalSecretsAPI.java index 7444c63a8..bbadb41e0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalSecretsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/oauth2/ServicePrincipalSecretsAPI.java @@ -3,6 +3,7 @@ import com.databricks.sdk.core.ApiClient; import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -78,7 +79,8 @@ public Iterable list(long servicePrincipalId) { * information about the secrets themselves and does not include the secret values. */ public Iterable list(ListServicePrincipalSecretsRequest request) { - return impl.list(request).getSecrets(); + return new Paginator<>( + request, impl::list, ListServicePrincipalSecretsResponse::getSecrets, response -> null); } public ServicePrincipalSecretsService impl() { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java index eb8c50728..a296ae89e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java @@ -40,6 +40,10 @@ public class CreatePipeline { @JsonProperty("continuous") private Boolean continuous; + /** Deployment type of this pipeline. 
*/ + @JsonProperty("deployment") + private PipelineDeployment deployment; + /** Whether the pipeline is in Development mode. Defaults to false. */ @JsonProperty("development") private Boolean development; @@ -150,6 +154,15 @@ public Boolean getContinuous() { return continuous; } + public CreatePipeline setDeployment(PipelineDeployment deployment) { + this.deployment = deployment; + return this; + } + + public PipelineDeployment getDeployment() { + return deployment; + } + public CreatePipeline setDevelopment(Boolean development) { this.development = development; return this; @@ -278,6 +291,7 @@ public boolean equals(Object o) { && Objects.equals(clusters, that.clusters) && Objects.equals(configuration, that.configuration) && Objects.equals(continuous, that.continuous) + && Objects.equals(deployment, that.deployment) && Objects.equals(development, that.development) && Objects.equals(dryRun, that.dryRun) && Objects.equals(edition, that.edition) @@ -302,6 +316,7 @@ public int hashCode() { clusters, configuration, continuous, + deployment, development, dryRun, edition, @@ -326,6 +341,7 @@ public String toString() { .add("clusters", clusters) .add("configuration", configuration) .add("continuous", continuous) + .add("deployment", deployment) .add("development", development) .add("dryRun", dryRun) .add("edition", edition) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DeploymentKind.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DeploymentKind.java new file mode 100755 index 000000000..450900a0d --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/DeploymentKind.java @@ -0,0 +1,14 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+ +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; + +/** + * The deployment method that manages the pipeline: - BUNDLE: The pipeline is managed by a + * Databricks Asset Bundle. + */ +@Generated +public enum DeploymentKind { + BUNDLE, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java index 18597b5bd..995ceea26 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java @@ -42,6 +42,10 @@ public class EditPipeline { @JsonProperty("continuous") private Boolean continuous; + /** Deployment type of this pipeline. */ + @JsonProperty("deployment") + private PipelineDeployment deployment; + /** Whether the pipeline is in Development mode. Defaults to false. */ @JsonProperty("development") private Boolean development; @@ -159,6 +163,15 @@ public Boolean getContinuous() { return continuous; } + public EditPipeline setDeployment(PipelineDeployment deployment) { + this.deployment = deployment; + return this; + } + + public PipelineDeployment getDeployment() { + return deployment; + } + public EditPipeline setDevelopment(Boolean development) { this.development = development; return this; @@ -296,6 +309,7 @@ public boolean equals(Object o) { && Objects.equals(clusters, that.clusters) && Objects.equals(configuration, that.configuration) && Objects.equals(continuous, that.continuous) + && Objects.equals(deployment, that.deployment) && Objects.equals(development, that.development) && Objects.equals(edition, that.edition) && Objects.equals(expectedLastModified, that.expectedLastModified) @@ -321,6 +335,7 @@ public int hashCode() { clusters, configuration, continuous, + deployment, development, edition, expectedLastModified, @@ -346,6 +361,7 @@ public String 
toString() { .add("clusters", clusters) .add("configuration", configuration) .add("continuous", continuous) + .add("deployment", deployment) .add("development", development) .add("edition", edition) .add("expectedLastModified", expectedLastModified) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineDeployment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineDeployment.java new file mode 100755 index 000000000..579d9f1f1 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineDeployment.java @@ -0,0 +1,59 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.pipelines; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +@Generated +public class PipelineDeployment { + /** The deployment method that manages the pipeline. */ + @JsonProperty("kind") + private DeploymentKind kind; + + /** The path to the file containing metadata about the deployment. 
*/ + @JsonProperty("metadata_file_path") + private String metadataFilePath; + + public PipelineDeployment setKind(DeploymentKind kind) { + this.kind = kind; + return this; + } + + public DeploymentKind getKind() { + return kind; + } + + public PipelineDeployment setMetadataFilePath(String metadataFilePath) { + this.metadataFilePath = metadataFilePath; + return this; + } + + public String getMetadataFilePath() { + return metadataFilePath; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PipelineDeployment that = (PipelineDeployment) o; + return Objects.equals(kind, that.kind) + && Objects.equals(metadataFilePath, that.metadataFilePath); + } + + @Override + public int hashCode() { + return Objects.hash(kind, metadataFilePath); + } + + @Override + public String toString() { + return new ToStringer(PipelineDeployment.class) + .add("kind", kind) + .add("metadataFilePath", metadataFilePath) + .toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineSpec.java index e0b278a5f..8d8288b77 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineSpec.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineSpec.java @@ -36,6 +36,10 @@ public class PipelineSpec { @JsonProperty("continuous") private Boolean continuous; + /** Deployment type of this pipeline. */ + @JsonProperty("deployment") + private PipelineDeployment deployment; + /** Whether the pipeline is in Development mode. Defaults to false. 
*/ @JsonProperty("development") private Boolean development; @@ -133,6 +137,15 @@ public Boolean getContinuous() { return continuous; } + public PipelineSpec setDeployment(PipelineDeployment deployment) { + this.deployment = deployment; + return this; + } + + public PipelineDeployment getDeployment() { + return deployment; + } + public PipelineSpec setDevelopment(Boolean development) { this.development = development; return this; @@ -251,6 +264,7 @@ public boolean equals(Object o) { && Objects.equals(clusters, that.clusters) && Objects.equals(configuration, that.configuration) && Objects.equals(continuous, that.continuous) + && Objects.equals(deployment, that.deployment) && Objects.equals(development, that.development) && Objects.equals(edition, that.edition) && Objects.equals(filters, that.filters) @@ -273,6 +287,7 @@ public int hashCode() { clusters, configuration, continuous, + deployment, development, edition, filters, @@ -295,6 +310,7 @@ public String toString() { .add("clusters", clusters) .add("configuration", configuration) .add("continuous", continuous) + .add("deployment", deployment) .add("development", development) .add("edition", edition) .add("filters", filters) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/UpdateWorkspaceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/UpdateWorkspaceRequest.java index ac8b3e2be..5c9d6f96c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/UpdateWorkspaceRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/UpdateWorkspaceRequest.java @@ -39,10 +39,7 @@ public class UpdateWorkspaceRequest { @JsonProperty("managed_services_customer_managed_key_id") private String managedServicesCustomerManagedKeyId; - /** - * The ID of the network connectivity configuration object, which is the parent resource of this - * private endpoint rule object. 
- */ + /** */ @JsonProperty("network_connectivity_config_id") private String networkConnectivityConfigId; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesAPI.java index 91072a5b5..054a3f838 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesAPI.java @@ -187,7 +187,11 @@ public Wait update(long workspaceId) { * update the private access settings ID to upgrade a workspace to add support for front-end, * back-end, or both types of connectivity. You cannot remove (downgrade) any existing front-end * or back-end PrivateLink support on a workspace. - Custom tags. Given you provide an empty - * custom tags, the update would not be applied. + * custom tags, the update would not be applied. - Network connectivity configuration ID to add + * serverless stable IP support. You can add or update the network connectivity configuration ID + * to ensure the workspace uses the same set of stable IP CIDR blocks to access your resources. + * You cannot remove a network connectivity configuration from the workspace once attached, you + * can only switch to another one. * *

After calling the `PATCH` operation to update the workspace configuration, make repeated * `GET` requests with the workspace ID and check the workspace status. The workspace is @@ -198,15 +202,13 @@ public Wait update(long workspaceId) { * *

### Update a running workspace You can update a Databricks workspace configuration for * running workspaces for some fields, but not all fields. For a running workspace, this request - * supports updating the following fields only: - Credential configuration ID - * - *

- Network configuration ID. Used only if you already use a customer-managed VPC. You cannot - * convert a running workspace from a Databricks-managed VPC to a customer-managed VPC. You can - * use a network configuration update in this API for a failed or running workspace to add support - * for PrivateLink, although you also need to add a private access settings object. - * - *

- Key configuration ID for managed services (control plane storage, such as notebook source - * and Databricks SQL queries). Databricks does not directly encrypt the data with the + * supports updating the following fields only: - Credential configuration ID - Network + * configuration ID. Used only if you already use a customer-managed VPC. You cannot convert a + * running workspace from a Databricks-managed VPC to a customer-managed VPC. You can use a + * network configuration update in this API for a failed or running workspace to add support for + * PrivateLink, although you also need to add a private access settings object. - Key + * configuration ID for managed services (control plane storage, such as notebook source and + * Databricks SQL queries). Databricks does not directly encrypt the data with the * customer-managed key (CMK). Databricks uses both the CMK and the Databricks managed key (DMK) * that is unique to your workspace to encrypt the Data Encryption Key (DEK). Databricks uses the * DEK to encrypt your workspace's managed services persisted data. If the workspace does not @@ -221,7 +223,10 @@ public Wait update(long workspaceId) { * upgrade a workspace to add support for front-end, back-end, or both types of connectivity. You * cannot remove (downgrade) any existing front-end or back-end PrivateLink support on a * workspace. - Custom tags. Given you provide an empty custom tags, the update would not be - * applied. + * applied. - Network connectivity configuration ID to add serverless stable IP support. You can + * add or update the network connectivity configuration ID to ensure the workspace uses the same + * set of stable IP CIDR blocks to access your resources. You cannot remove a network connectivity + * configuration from the workspace once attached, you can only switch to another one. * *

**Important**: To update a running workspace, your workspace must have no running compute * resources that run in your workspace's VPC in the Classic data plane. For example, stop all @@ -238,10 +243,8 @@ public Wait update(long workspaceId) { * clusters for another 20 minutes after that status change. This results in a total of up to 40 * minutes in which you cannot create clusters. If you create or use clusters before this time * interval elapses, clusters do not launch successfully, fail, or could cause other unexpected - * behavior. - * - *

* For workspaces with a customer-managed VPC, the workspace status stays at status `RUNNING` - * and the VPC change happens immediately. A change to the storage customer-managed key + * behavior. * For workspaces with a customer-managed VPC, the workspace status stays at status + * `RUNNING` and the VPC change happens immediately. A change to the storage customer-managed key * configuration ID might take a few minutes to update, so continue to check the workspace until * you observe that it has been updated. If the update fails, the workspace might revert silently * to its original configuration. After the workspace has been updated, you cannot use or create diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesService.java index 3869ad9d8..a9c9dcfa0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesService.java @@ -98,7 +98,11 @@ public interface WorkspacesService { * update the private access settings ID to upgrade a workspace to add support for front-end, * back-end, or both types of connectivity. You cannot remove (downgrade) any existing front-end * or back-end PrivateLink support on a workspace. - Custom tags. Given you provide an empty - * custom tags, the update would not be applied. + * custom tags, the update would not be applied. - Network connectivity configuration ID to add + * serverless stable IP support. You can add or update the network connectivity configuration ID + * to ensure the workspace uses the same set of stable IP CIDR blocks to access your resources. + * You cannot remove a network connectivity configuration from the workspace once attached, you + * can only switch to another one. * *

After calling the `PATCH` operation to update the workspace configuration, make repeated * `GET` requests with the workspace ID and check the workspace status. The workspace is @@ -109,15 +113,13 @@ public interface WorkspacesService { * *

### Update a running workspace You can update a Databricks workspace configuration for * running workspaces for some fields, but not all fields. For a running workspace, this request - * supports updating the following fields only: - Credential configuration ID - * - *

- Network configuration ID. Used only if you already use a customer-managed VPC. You cannot - * convert a running workspace from a Databricks-managed VPC to a customer-managed VPC. You can - * use a network configuration update in this API for a failed or running workspace to add support - * for PrivateLink, although you also need to add a private access settings object. - * - *

- Key configuration ID for managed services (control plane storage, such as notebook source - * and Databricks SQL queries). Databricks does not directly encrypt the data with the + * supports updating the following fields only: - Credential configuration ID - Network + * configuration ID. Used only if you already use a customer-managed VPC. You cannot convert a + * running workspace from a Databricks-managed VPC to a customer-managed VPC. You can use a + * network configuration update in this API for a failed or running workspace to add support for + * PrivateLink, although you also need to add a private access settings object. - Key + * configuration ID for managed services (control plane storage, such as notebook source and + * Databricks SQL queries). Databricks does not directly encrypt the data with the * customer-managed key (CMK). Databricks uses both the CMK and the Databricks managed key (DMK) * that is unique to your workspace to encrypt the Data Encryption Key (DEK). Databricks uses the * DEK to encrypt your workspace's managed services persisted data. If the workspace does not @@ -132,7 +134,10 @@ public interface WorkspacesService { * upgrade a workspace to add support for front-end, back-end, or both types of connectivity. You * cannot remove (downgrade) any existing front-end or back-end PrivateLink support on a * workspace. - Custom tags. Given you provide an empty custom tags, the update would not be - * applied. + * applied. - Network connectivity configuration ID to add serverless stable IP support. You can + * add or update the network connectivity configuration ID to ensure the workspace uses the same + * set of stable IP CIDR blocks to access your resources. You cannot remove a network connectivity + * configuration from the workspace once attached, you can only switch to another one. * *

**Important**: To update a running workspace, your workspace must have no running compute * resources that run in your workspace's VPC in the Classic data plane. For example, stop all @@ -149,10 +154,8 @@ public interface WorkspacesService { * clusters for another 20 minutes after that status change. This results in a total of up to 40 * minutes in which you cannot create clusters. If you create or use clusters before this time * interval elapses, clusters do not launch successfully, fail, or could cause other unexpected - * behavior. - * - *

* For workspaces with a customer-managed VPC, the workspace status stays at status `RUNNING` - * and the VPC change happens immediately. A change to the storage customer-managed key + * behavior. * For workspaces with a customer-managed VPC, the workspace status stays at status + * `RUNNING` and the VPC change happens immediately. A change to the storage customer-managed key * configuration ID might take a few minutes to update, so continue to check the workspace until * you observe that it has been updated. If the update fails, the workspace might revert silently * to its original configuration. After the workspace has been updated, you cannot use or create diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AwsBedrockConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AmazonBedrockConfig.java similarity index 75% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AwsBedrockConfig.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AmazonBedrockConfig.java index c8ce8ad1d..a53311847 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AwsBedrockConfig.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AmazonBedrockConfig.java @@ -8,7 +8,7 @@ import java.util.Objects; @Generated -public class AwsBedrockConfig { +public class AmazonBedrockConfig { /** * The Databricks secret key reference for an AWS Access Key ID with permissions to interact with * Bedrock services. @@ -28,13 +28,13 @@ public class AwsBedrockConfig { private String awsSecretAccessKey; /** - * The underlying provider in AWS Bedrock. Supported values (case insensitive) include: Anthropic, - * Cohere, AI21Labs, Amazon. + * The underlying provider in Amazon Bedrock. Supported values (case insensitive) include: + * Anthropic, Cohere, AI21Labs, Amazon. 
*/ @JsonProperty("bedrock_provider") - private AwsBedrockConfigBedrockProvider bedrockProvider; + private AmazonBedrockConfigBedrockProvider bedrockProvider; - public AwsBedrockConfig setAwsAccessKeyId(String awsAccessKeyId) { + public AmazonBedrockConfig setAwsAccessKeyId(String awsAccessKeyId) { this.awsAccessKeyId = awsAccessKeyId; return this; } @@ -43,7 +43,7 @@ public String getAwsAccessKeyId() { return awsAccessKeyId; } - public AwsBedrockConfig setAwsRegion(String awsRegion) { + public AmazonBedrockConfig setAwsRegion(String awsRegion) { this.awsRegion = awsRegion; return this; } @@ -52,7 +52,7 @@ public String getAwsRegion() { return awsRegion; } - public AwsBedrockConfig setAwsSecretAccessKey(String awsSecretAccessKey) { + public AmazonBedrockConfig setAwsSecretAccessKey(String awsSecretAccessKey) { this.awsSecretAccessKey = awsSecretAccessKey; return this; } @@ -61,12 +61,13 @@ public String getAwsSecretAccessKey() { return awsSecretAccessKey; } - public AwsBedrockConfig setBedrockProvider(AwsBedrockConfigBedrockProvider bedrockProvider) { + public AmazonBedrockConfig setBedrockProvider( + AmazonBedrockConfigBedrockProvider bedrockProvider) { this.bedrockProvider = bedrockProvider; return this; } - public AwsBedrockConfigBedrockProvider getBedrockProvider() { + public AmazonBedrockConfigBedrockProvider getBedrockProvider() { return bedrockProvider; } @@ -74,7 +75,7 @@ public AwsBedrockConfigBedrockProvider getBedrockProvider() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - AwsBedrockConfig that = (AwsBedrockConfig) o; + AmazonBedrockConfig that = (AmazonBedrockConfig) o; return Objects.equals(awsAccessKeyId, that.awsAccessKeyId) && Objects.equals(awsRegion, that.awsRegion) && Objects.equals(awsSecretAccessKey, that.awsSecretAccessKey) @@ -88,7 +89,7 @@ public int hashCode() { @Override public String toString() { - return new ToStringer(AwsBedrockConfig.class) + return new 
ToStringer(AmazonBedrockConfig.class) .add("awsAccessKeyId", awsAccessKeyId) .add("awsRegion", awsRegion) .add("awsSecretAccessKey", awsSecretAccessKey) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AwsBedrockConfigBedrockProvider.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AmazonBedrockConfigBedrockProvider.java similarity index 69% rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AwsBedrockConfigBedrockProvider.java rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AmazonBedrockConfigBedrockProvider.java index b7d7b93a2..4a4516289 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AwsBedrockConfigBedrockProvider.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AmazonBedrockConfigBedrockProvider.java @@ -6,11 +6,11 @@ import com.fasterxml.jackson.annotation.JsonProperty; /** - * The underlying provider in AWS Bedrock. Supported values (case insensitive) include: Anthropic, - * Cohere, AI21Labs, Amazon. + * The underlying provider in Amazon Bedrock. Supported values (case insensitive) include: + * Anthropic, Cohere, AI21Labs, Amazon. 
*/ @Generated -public enum AwsBedrockConfigBedrockProvider { +public enum AmazonBedrockConfigBedrockProvider { @JsonProperty("ai21labs") AI21LABS, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/BuildLogsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/BuildLogsRequest.java index 7c20f2efe..54558cb79 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/BuildLogsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/BuildLogsRequest.java @@ -6,10 +6,7 @@ import com.databricks.sdk.support.ToStringer; import java.util.Objects; -/** - * Retrieve the logs associated with building the model's environment for a given serving endpoint's - * served model. - */ +/** Get build logs for a served model */ @Generated public class BuildLogsRequest { /** The name of the serving endpoint that the served model belongs to. This field is required. */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointCoreConfigInput.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointCoreConfigInput.java index 147006edd..f7fd03f75 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointCoreConfigInput.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointCoreConfigInput.java @@ -21,7 +21,7 @@ public class EndpointCoreConfigInput { private String name; /** - * A list of served entities for the endpoint to serve. A serving endpoint can have up to 10 + * A list of served entities for the endpoint to serve. A serving endpoint can have up to 15 * served entities. */ @JsonProperty("served_entities") @@ -29,7 +29,7 @@ public class EndpointCoreConfigInput { /** * (Deprecated, use served_entities instead) A list of served models for the endpoint to serve. A - * serving endpoint can have up to 10 served models. 
+ * serving endpoint can have up to 15 served models. */ @JsonProperty("served_models") private Collection servedModels; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointPendingConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointPendingConfig.java index 11e71276b..6f635ed13 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointPendingConfig.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/EndpointPendingConfig.java @@ -10,6 +10,13 @@ @Generated public class EndpointPendingConfig { + /** + * Configuration for Inference Tables which automatically logs requests and responses to Unity + * Catalog. + */ + @JsonProperty("auto_capture_config") + private AutoCaptureConfigOutput autoCaptureConfig; + /** The config version that the serving endpoint is currently serving. */ @JsonProperty("config_version") private Long configVersion; @@ -33,6 +40,15 @@ public class EndpointPendingConfig { @JsonProperty("traffic_config") private TrafficConfig trafficConfig; + public EndpointPendingConfig setAutoCaptureConfig(AutoCaptureConfigOutput autoCaptureConfig) { + this.autoCaptureConfig = autoCaptureConfig; + return this; + } + + public AutoCaptureConfigOutput getAutoCaptureConfig() { + return autoCaptureConfig; + } + public EndpointPendingConfig setConfigVersion(Long configVersion) { this.configVersion = configVersion; return this; @@ -83,7 +99,8 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; EndpointPendingConfig that = (EndpointPendingConfig) o; - return Objects.equals(configVersion, that.configVersion) + return Objects.equals(autoCaptureConfig, that.autoCaptureConfig) + && Objects.equals(configVersion, that.configVersion) && Objects.equals(servedEntities, that.servedEntities) && Objects.equals(servedModels, that.servedModels) && Objects.equals(startTime, 
that.startTime) @@ -92,12 +109,14 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(configVersion, servedEntities, servedModels, startTime, trafficConfig); + return Objects.hash( + autoCaptureConfig, configVersion, servedEntities, servedModels, startTime, trafficConfig); } @Override public String toString() { return new ToStringer(EndpointPendingConfig.class) + .add("autoCaptureConfig", autoCaptureConfig) .add("configVersion", configVersion) .add("servedEntities", servedEntities) .add("servedModels", servedModels) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExportMetricsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExportMetricsRequest.java index 7144a0c3b..bcdc26db2 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExportMetricsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExportMetricsRequest.java @@ -6,7 +6,7 @@ import com.databricks.sdk.support.ToStringer; import java.util.Objects; -/** Retrieve the metrics associated with a serving endpoint */ +/** Get metrics of a serving endpoint */ @Generated public class ExportMetricsRequest { /** The name of the serving endpoint to retrieve metrics for. This field is required. */ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalModel.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalModel.java index c1d606066..e5dd1a736 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalModel.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalModel.java @@ -13,14 +13,14 @@ public class ExternalModel { @JsonProperty("ai21labs_config") private Ai21LabsConfig ai21labsConfig; + /** Amazon Bedrock Config. Only required if the provider is 'amazon-bedrock'. 
*/ + @JsonProperty("amazon_bedrock_config") + private AmazonBedrockConfig amazonBedrockConfig; + /** Anthropic Config. Only required if the provider is 'anthropic'. */ @JsonProperty("anthropic_config") private AnthropicConfig anthropicConfig; - /** AWS Bedrock Config. Only required if the provider is 'aws-bedrock'. */ - @JsonProperty("aws_bedrock_config") - private AwsBedrockConfig awsBedrockConfig; - /** Cohere Config. Only required if the provider is 'cohere'. */ @JsonProperty("cohere_config") private CohereConfig cohereConfig; @@ -45,7 +45,7 @@ public class ExternalModel { /** * The name of the provider for the external model. Currently, the supported providers are - * 'ai21labs', 'anthropic', 'aws-bedrock', 'cohere', 'databricks-model-serving', 'openai', and + * 'ai21labs', 'anthropic', 'amazon-bedrock', 'cohere', 'databricks-model-serving', 'openai', and * 'palm'.", */ @JsonProperty("provider") @@ -64,22 +64,22 @@ public Ai21LabsConfig getAi21labsConfig() { return ai21labsConfig; } - public ExternalModel setAnthropicConfig(AnthropicConfig anthropicConfig) { - this.anthropicConfig = anthropicConfig; + public ExternalModel setAmazonBedrockConfig(AmazonBedrockConfig amazonBedrockConfig) { + this.amazonBedrockConfig = amazonBedrockConfig; return this; } - public AnthropicConfig getAnthropicConfig() { - return anthropicConfig; + public AmazonBedrockConfig getAmazonBedrockConfig() { + return amazonBedrockConfig; } - public ExternalModel setAwsBedrockConfig(AwsBedrockConfig awsBedrockConfig) { - this.awsBedrockConfig = awsBedrockConfig; + public ExternalModel setAnthropicConfig(AnthropicConfig anthropicConfig) { + this.anthropicConfig = anthropicConfig; return this; } - public AwsBedrockConfig getAwsBedrockConfig() { - return awsBedrockConfig; + public AnthropicConfig getAnthropicConfig() { + return anthropicConfig; } public ExternalModel setCohereConfig(CohereConfig cohereConfig) { @@ -152,8 +152,8 @@ public boolean equals(Object o) { if (o == null || getClass() != 
o.getClass()) return false; ExternalModel that = (ExternalModel) o; return Objects.equals(ai21labsConfig, that.ai21labsConfig) + && Objects.equals(amazonBedrockConfig, that.amazonBedrockConfig) && Objects.equals(anthropicConfig, that.anthropicConfig) - && Objects.equals(awsBedrockConfig, that.awsBedrockConfig) && Objects.equals(cohereConfig, that.cohereConfig) && Objects.equals(databricksModelServingConfig, that.databricksModelServingConfig) && Objects.equals(name, that.name) @@ -167,8 +167,8 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash( ai21labsConfig, + amazonBedrockConfig, anthropicConfig, - awsBedrockConfig, cohereConfig, databricksModelServingConfig, name, @@ -182,8 +182,8 @@ public int hashCode() { public String toString() { return new ToStringer(ExternalModel.class) .add("ai21labsConfig", ai21labsConfig) + .add("amazonBedrockConfig", amazonBedrockConfig) .add("anthropicConfig", anthropicConfig) - .add("awsBedrockConfig", awsBedrockConfig) .add("cohereConfig", cohereConfig) .add("databricksModelServingConfig", databricksModelServingConfig) .add("name", name) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalModelProvider.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalModelProvider.java index daeecf987..5ab3302f4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalModelProvider.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ExternalModelProvider.java @@ -7,7 +7,7 @@ /** * The name of the provider for the external model. 
Currently, the supported providers are - * 'ai21labs', 'anthropic', 'aws-bedrock', 'cohere', 'databricks-model-serving', 'openai', and + * 'ai21labs', 'anthropic', 'amazon-bedrock', 'cohere', 'databricks-model-serving', 'openai', and * 'palm'.", */ @Generated @@ -15,12 +15,12 @@ public enum ExternalModelProvider { @JsonProperty("ai21labs") AI21LABS, + @JsonProperty("amazon-bedrock") + AMAZON_BEDROCK, + @JsonProperty("anthropic") ANTHROPIC, - @JsonProperty("aws-bedrock") - AWS_BEDROCK, - @JsonProperty("cohere") COHERE, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/LogsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/LogsRequest.java index 50b14b273..5f2953b1e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/LogsRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/LogsRequest.java @@ -6,7 +6,7 @@ import com.databricks.sdk.support.ToStringer; import java.util.Objects; -/** Retrieve the most recent log lines associated with a given serving endpoint's served model */ +/** Get the latest logs for a served model */ @Generated public class LogsRequest { /** The name of the serving endpoint that the served model belongs to. This field is required. 
*/ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutRequest.java index 144442330..a2796c8b5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutRequest.java @@ -8,7 +8,7 @@ import java.util.Collection; import java.util.Objects; -/** Update the rate limits of a serving endpoint */ +/** Update rate limits of a serving endpoint */ @Generated public class PutRequest { /** diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsAPI.java index 96e1a7548..3e2b38cfd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsAPI.java @@ -3,6 +3,7 @@ import com.databricks.sdk.core.ApiClient; import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; import com.databricks.sdk.support.Wait; import java.time.Duration; import java.util.Arrays; @@ -92,8 +93,7 @@ public BuildLogsResponse buildLogs(String name, String servedModelName) { } /** - * Retrieve the logs associated with building the model's environment for a given serving - * endpoint's served model. + * Get build logs for a served model. * *

Retrieves the build logs associated with the provided served model. */ @@ -130,7 +130,7 @@ public void exportMetrics(String name) { } /** - * Retrieve the metrics associated with a serving endpoint. + * Get metrics of a serving endpoint. * *

Retrieves the metrics associated with the provided serving endpoint in either Prometheus or * OpenMetrics exposition format. @@ -182,9 +182,10 @@ public ServingEndpointPermissions getPermissions(GetServingEndpointPermissionsRe return impl.getPermissions(request); } - /** Retrieve all serving endpoints. */ + /** Get all serving endpoints. */ public Iterable list() { - return impl.list().getEndpoints(); + return new Paginator<>( + null, (Void v) -> impl.list(), ListEndpointsResponse::getEndpoints, response -> null); } public ServerLogsResponse logs(String name, String servedModelName) { @@ -192,7 +193,7 @@ public ServerLogsResponse logs(String name, String servedModelName) { } /** - * Retrieve the most recent log lines associated with a given serving endpoint's served model. + * Get the latest logs for a served model. * *

Retrieves the service logs associated with the provided served model. */ @@ -205,7 +206,7 @@ public Iterable patch(String name) { } /** - * Patch the tags of a serving endpoint. + * Update tags of a serving endpoint. * *

Used to batch add and delete tags from a serving endpoint with a single API call. */ @@ -218,7 +219,7 @@ public PutResponse put(String name) { } /** - * Update the rate limits of a serving endpoint. + * Update rate limits of a serving endpoint. * *

Used to update the rate limits of a serving endpoint. NOTE: only external and foundation * model endpoints are supported as of now. @@ -231,7 +232,7 @@ public QueryEndpointResponse query(String name) { return query(new QueryEndpointInput().setName(name)); } - /** Query a serving endpoint with provided model input. */ + /** Query a serving endpoint. */ public QueryEndpointResponse query(QueryEndpointInput request) { return impl.query(request); } @@ -256,7 +257,7 @@ public Wait updateConfig(Strin } /** - * Update a serving endpoint with a new config. + * Update config of a serving endpoint. * *

Updates any combination of the serving endpoint's served entities, the compute configuration * of those served entities, and the endpoint's traffic config. An endpoint that already has an diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsService.java index 799bbd8bb..9a0eda68b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsService.java @@ -23,8 +23,7 @@ @Generated public interface ServingEndpointsService { /** - * Retrieve the logs associated with building the model's environment for a given serving - * endpoint's served model. + * Get build logs for a served model. * *

Retrieves the build logs associated with the provided served model. */ @@ -37,7 +36,7 @@ public interface ServingEndpointsService { void delete(DeleteServingEndpointRequest deleteServingEndpointRequest); /** - * Retrieve the metrics associated with a serving endpoint. + * Get metrics of a serving endpoint. * *

Retrieves the metrics associated with the provided serving endpoint in either Prometheus or * OpenMetrics exposition format. @@ -68,32 +67,32 @@ GetServingEndpointPermissionLevelsResponse getPermissionLevels( ServingEndpointPermissions getPermissions( GetServingEndpointPermissionsRequest getServingEndpointPermissionsRequest); - /** Retrieve all serving endpoints. */ + /** Get all serving endpoints. */ ListEndpointsResponse list(); /** - * Retrieve the most recent log lines associated with a given serving endpoint's served model. + * Get the latest logs for a served model. * *

Retrieves the service logs associated with the provided served model. */ ServerLogsResponse logs(LogsRequest logsRequest); /** - * Patch the tags of a serving endpoint. + * Update tags of a serving endpoint. * *

Used to batch add and delete tags from a serving endpoint with a single API call. */ Collection patch(PatchServingEndpointTags patchServingEndpointTags); /** - * Update the rate limits of a serving endpoint. + * Update rate limits of a serving endpoint. * *

Used to update the rate limits of a serving endpoint. NOTE: only external and foundation * model endpoints are supported as of now. */ PutResponse put(PutRequest putRequest); - /** Query a serving endpoint with provided model input. */ + /** Query a serving endpoint. */ QueryEndpointResponse query(QueryEndpointInput queryEndpointInput); /** @@ -106,7 +105,7 @@ ServingEndpointPermissions setPermissions( ServingEndpointPermissionsRequest servingEndpointPermissionsRequest); /** - * Update a serving endpoint with a new config. + * Update config of a serving endpoint. * *

Updates any combination of the serving endpoint's served entities, the compute configuration * of those served entities, and the endpoint's traffic config. An endpoint that already has an diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountIpAccessListsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountIpAccessListsAPI.java index 44bdbf820..a8a4d01c5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountIpAccessListsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountIpAccessListsAPI.java @@ -3,6 +3,7 @@ import com.databricks.sdk.core.ApiClient; import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -101,7 +102,11 @@ public GetIpAccessListResponse get(GetAccountIpAccessListRequest request) { *

Gets all IP access lists for the specified account. */ public Iterable list() { - return impl.list().getIpAccessLists(); + return new Paginator<>( + null, + (Void v) -> impl.list(), + GetIpAccessListsResponse::getIpAccessLists, + response -> null); } public void replace(String ipAccessListId, String label, ListType listType, boolean enabled) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountSettingsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountSettingsAPI.java index e860939a8..cb24708e1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountSettingsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountSettingsAPI.java @@ -6,127 +6,57 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -/** - * The Personal Compute enablement setting lets you control which users can use the Personal Compute - * default policy to create compute resources. By default all users in all workspaces have access - * (ON), but you can change the setting to instead let individual workspaces configure access - * control (DELEGATE). - * - *

There is only one instance of this setting per account. Since this setting has a default - * value, this setting is present on all accounts even though it's never set on a given account. - * Deletion reverts the value of the setting back to the default value. - */ +/** Accounts Settings API allows users to manage settings at the account level. */ @Generated public class AccountSettingsAPI { private static final Logger LOG = LoggerFactory.getLogger(AccountSettingsAPI.class); private final AccountSettingsService impl; + private CspEnablementAccountAPI cspEnablementAccountAPI; + + private EsmEnablementAccountAPI esmEnablementAccountAPI; + + private PersonalComputeAPI personalComputeAPI; + /** Regular-use constructor */ public AccountSettingsAPI(ApiClient apiClient) { impl = new AccountSettingsImpl(apiClient); - } - - /** Constructor for mocks */ - public AccountSettingsAPI(AccountSettingsService mock) { - impl = mock; - } - /** - * Delete Personal Compute setting. - * - *

Reverts back the Personal Compute setting value to default (ON) - */ - public DeletePersonalComputeSettingResponse deletePersonalComputeSetting( - DeletePersonalComputeSettingRequest request) { - return impl.deletePersonalComputeSetting(request); - } - - /** - * Get the compliance security profile setting for new workspaces. - * - *

Gets the compliance security profile setting for new workspaces. - */ - public CspEnablementAccountSetting getCspEnablementAccountSetting( - GetCspEnablementAccountSettingRequest request) { - return impl.getCspEnablementAccountSetting(request); - } + cspEnablementAccountAPI = new CspEnablementAccountAPI(apiClient); - /** - * Get the enhanced security monitoring setting for new workspaces. - * - *

Gets the enhanced security monitoring setting for new workspaces. - */ - public EsmEnablementAccountSetting getEsmEnablementAccountSetting( - GetEsmEnablementAccountSettingRequest request) { - return impl.getEsmEnablementAccountSetting(request); - } + esmEnablementAccountAPI = new EsmEnablementAccountAPI(apiClient); - /** - * Get Personal Compute setting. - * - *

Gets the value of the Personal Compute setting. - */ - public PersonalComputeSetting getPersonalComputeSetting( - GetPersonalComputeSettingRequest request) { - return impl.getPersonalComputeSetting(request); + personalComputeAPI = new PersonalComputeAPI(apiClient); } - public CspEnablementAccountSetting updateCspEnablementAccountSetting( - boolean allowMissing, CspEnablementAccountSetting setting, String fieldMask) { - return updateCspEnablementAccountSetting( - new UpdateCspEnablementAccountSettingRequest() - .setAllowMissing(allowMissing) - .setSetting(setting) - .setFieldMask(fieldMask)); + /** Constructor for mocks */ + public AccountSettingsAPI(AccountSettingsService mock) { + impl = mock; } /** - * Update the compliance security profile setting for new workspaces. - * - *

Updates the value of the compliance security profile setting for new workspaces. + * The compliance security profile settings at the account level control whether to enable it for + * new workspaces. */ - public CspEnablementAccountSetting updateCspEnablementAccountSetting( - UpdateCspEnablementAccountSettingRequest request) { - return impl.updateCspEnablementAccountSetting(request); - } - - public EsmEnablementAccountSetting updateEsmEnablementAccountSetting( - boolean allowMissing, EsmEnablementAccountSetting setting, String fieldMask) { - return updateEsmEnablementAccountSetting( - new UpdateEsmEnablementAccountSettingRequest() - .setAllowMissing(allowMissing) - .setSetting(setting) - .setFieldMask(fieldMask)); + public CspEnablementAccountAPI CspEnablementAccount() { + return cspEnablementAccountAPI; } /** - * Update the enhanced security monitoring setting for new workspaces. - * - *

Updates the value of the enhanced security monitoring setting for new workspaces. + * The enhanced security monitoring setting at the account level controls whether to enable the + * feature on new workspaces. */ - public EsmEnablementAccountSetting updateEsmEnablementAccountSetting( - UpdateEsmEnablementAccountSettingRequest request) { - return impl.updateEsmEnablementAccountSetting(request); - } - - public PersonalComputeSetting updatePersonalComputeSetting( - boolean allowMissing, PersonalComputeSetting setting, String fieldMask) { - return updatePersonalComputeSetting( - new UpdatePersonalComputeSettingRequest() - .setAllowMissing(allowMissing) - .setSetting(setting) - .setFieldMask(fieldMask)); + public EsmEnablementAccountAPI EsmEnablementAccount() { + return esmEnablementAccountAPI; } /** - * Update Personal Compute setting. - * - *

Updates the value of the Personal Compute setting. + * The Personal Compute enablement setting lets you control which users can use the Personal + * Compute default policy to create compute resources. */ - public PersonalComputeSetting updatePersonalComputeSetting( - UpdatePersonalComputeSettingRequest request) { - return impl.updatePersonalComputeSetting(request); + public PersonalComputeAPI PersonalCompute() { + return personalComputeAPI; } public AccountSettingsService impl() { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountSettingsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountSettingsImpl.java index df24598fd..8a8e32ef9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountSettingsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountSettingsImpl.java @@ -3,8 +3,6 @@ import com.databricks.sdk.core.ApiClient; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; /** Package-local implementation of AccountSettings */ @Generated @@ -14,91 +12,4 @@ class AccountSettingsImpl implements AccountSettingsService { public AccountSettingsImpl(ApiClient apiClient) { this.apiClient = apiClient; } - - @Override - public DeletePersonalComputeSettingResponse deletePersonalComputeSetting( - DeletePersonalComputeSettingRequest request) { - String path = - String.format( - "/api/2.0/accounts/%s/settings/types/dcp_acct_enable/names/default", - apiClient.configuredAccountID()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.DELETE(path, request, DeletePersonalComputeSettingResponse.class, headers); - } - - @Override - public CspEnablementAccountSetting getCspEnablementAccountSetting( - GetCspEnablementAccountSettingRequest request) { - String path = - String.format( - 
"/api/2.0/accounts/%s/settings/types/shield_csp_enablement_ac/names/default", - apiClient.configuredAccountID()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, CspEnablementAccountSetting.class, headers); - } - - @Override - public EsmEnablementAccountSetting getEsmEnablementAccountSetting( - GetEsmEnablementAccountSettingRequest request) { - String path = - String.format( - "/api/2.0/accounts/%s/settings/types/shield_esm_enablement_ac/names/default", - apiClient.configuredAccountID()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, EsmEnablementAccountSetting.class, headers); - } - - @Override - public PersonalComputeSetting getPersonalComputeSetting( - GetPersonalComputeSettingRequest request) { - String path = - String.format( - "/api/2.0/accounts/%s/settings/types/dcp_acct_enable/names/default", - apiClient.configuredAccountID()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, PersonalComputeSetting.class, headers); - } - - @Override - public CspEnablementAccountSetting updateCspEnablementAccountSetting( - UpdateCspEnablementAccountSettingRequest request) { - String path = - String.format( - "/api/2.0/accounts/%s/settings/types/shield_csp_enablement_ac/names/default", - apiClient.configuredAccountID()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PATCH(path, request, CspEnablementAccountSetting.class, headers); - } - - @Override - public EsmEnablementAccountSetting updateEsmEnablementAccountSetting( - UpdateEsmEnablementAccountSettingRequest request) { - String path = - String.format( - "/api/2.0/accounts/%s/settings/types/shield_esm_enablement_ac/names/default", - apiClient.configuredAccountID()); - Map headers = new HashMap<>(); - headers.put("Accept", 
"application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PATCH(path, request, EsmEnablementAccountSetting.class, headers); - } - - @Override - public PersonalComputeSetting updatePersonalComputeSetting( - UpdatePersonalComputeSettingRequest request) { - String path = - String.format( - "/api/2.0/accounts/%s/settings/types/dcp_acct_enable/names/default", - apiClient.configuredAccountID()); - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PATCH(path, request, PersonalComputeSetting.class, headers); - } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountSettingsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountSettingsService.java index 5e7b33a5f..269b9dfb4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountSettingsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountSettingsService.java @@ -4,74 +4,11 @@ import com.databricks.sdk.support.Generated; /** - * The Personal Compute enablement setting lets you control which users can use the Personal Compute - * default policy to create compute resources. By default all users in all workspaces have access - * (ON), but you can change the setting to instead let individual workspaces configure access - * control (DELEGATE). - * - *

There is only one instance of this setting per account. Since this setting has a default - * value, this setting is present on all accounts even though it's never set on a given account. - * Deletion reverts the value of the setting back to the default value. + * Accounts Settings API allows users to manage settings at the account level. * *

This is the high-level interface, that contains generated methods. * *

Evolving: this interface is under development. Method signatures may change. */ @Generated -public interface AccountSettingsService { - /** - * Delete Personal Compute setting. - * - *

Reverts back the Personal Compute setting value to default (ON) - */ - DeletePersonalComputeSettingResponse deletePersonalComputeSetting( - DeletePersonalComputeSettingRequest deletePersonalComputeSettingRequest); - - /** - * Get the compliance security profile setting for new workspaces. - * - *

Gets the compliance security profile setting for new workspaces. - */ - CspEnablementAccountSetting getCspEnablementAccountSetting( - GetCspEnablementAccountSettingRequest getCspEnablementAccountSettingRequest); - - /** - * Get the enhanced security monitoring setting for new workspaces. - * - *

Gets the enhanced security monitoring setting for new workspaces. - */ - EsmEnablementAccountSetting getEsmEnablementAccountSetting( - GetEsmEnablementAccountSettingRequest getEsmEnablementAccountSettingRequest); - - /** - * Get Personal Compute setting. - * - *

Gets the value of the Personal Compute setting. - */ - PersonalComputeSetting getPersonalComputeSetting( - GetPersonalComputeSettingRequest getPersonalComputeSettingRequest); - - /** - * Update the compliance security profile setting for new workspaces. - * - *

Updates the value of the compliance security profile setting for new workspaces. - */ - CspEnablementAccountSetting updateCspEnablementAccountSetting( - UpdateCspEnablementAccountSettingRequest updateCspEnablementAccountSettingRequest); - - /** - * Update the enhanced security monitoring setting for new workspaces. - * - *

Updates the value of the enhanced security monitoring setting for new workspaces. - */ - EsmEnablementAccountSetting updateEsmEnablementAccountSetting( - UpdateEsmEnablementAccountSettingRequest updateEsmEnablementAccountSettingRequest); - - /** - * Update Personal Compute setting. - * - *

Updates the value of the Personal Compute setting. - */ - PersonalComputeSetting updatePersonalComputeSetting( - UpdatePersonalComputeSettingRequest updatePersonalComputeSettingRequest); -} +public interface AccountSettingsService {} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AutomaticClusterUpdateAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AutomaticClusterUpdateAPI.java new file mode 100755 index 000000000..fa7d583a7 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AutomaticClusterUpdateAPI.java @@ -0,0 +1,63 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Controls whether automatic cluster update is enabled for the current workspace. By default, it is + * turned off. + */ +@Generated +public class AutomaticClusterUpdateAPI { + private static final Logger LOG = LoggerFactory.getLogger(AutomaticClusterUpdateAPI.class); + + private final AutomaticClusterUpdateService impl; + + /** Regular-use constructor */ + public AutomaticClusterUpdateAPI(ApiClient apiClient) { + impl = new AutomaticClusterUpdateImpl(apiClient); + } + + /** Constructor for mocks */ + public AutomaticClusterUpdateAPI(AutomaticClusterUpdateService mock) { + impl = mock; + } + + /** + * Get the automatic cluster update setting. + * + *

Gets the automatic cluster update setting. + */ + public AutomaticClusterUpdateSetting get(GetAutomaticClusterUpdateSettingRequest request) { + return impl.get(request); + } + + public AutomaticClusterUpdateSetting update( + boolean allowMissing, AutomaticClusterUpdateSetting setting, String fieldMask) { + return update( + new UpdateAutomaticClusterUpdateSettingRequest() + .setAllowMissing(allowMissing) + .setSetting(setting) + .setFieldMask(fieldMask)); + } + + /** + * Update the automatic cluster update setting. + * + *

Updates the automatic cluster update setting for the workspace. A fresh etag needs to be + * provided in `PATCH` requests (as part of the setting field). The etag can be retrieved by + * making a `GET` request before the `PATCH` request. If the setting is updated concurrently, + * `PATCH` fails with 409 and the request must be retried by using the fresh etag in the 409 + * response. + */ + public AutomaticClusterUpdateSetting update(UpdateAutomaticClusterUpdateSettingRequest request) { + return impl.update(request); + } + + public AutomaticClusterUpdateService impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AutomaticClusterUpdateImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AutomaticClusterUpdateImpl.java new file mode 100755 index 000000000..9771064ad --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AutomaticClusterUpdateImpl.java @@ -0,0 +1,34 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+package com.databricks.sdk.service.settings; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import java.util.HashMap; +import java.util.Map; + +/** Package-local implementation of AutomaticClusterUpdate */ +@Generated +class AutomaticClusterUpdateImpl implements AutomaticClusterUpdateService { + private final ApiClient apiClient; + + public AutomaticClusterUpdateImpl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public AutomaticClusterUpdateSetting get(GetAutomaticClusterUpdateSettingRequest request) { + String path = "/api/2.0/settings/types/automatic_cluster_update/names/default"; + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + return apiClient.GET(path, request, AutomaticClusterUpdateSetting.class, headers); + } + + @Override + public AutomaticClusterUpdateSetting update(UpdateAutomaticClusterUpdateSettingRequest request) { + String path = "/api/2.0/settings/types/automatic_cluster_update/names/default"; + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + headers.put("Content-Type", "application/json"); + return apiClient.PATCH(path, request, AutomaticClusterUpdateSetting.class, headers); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AutomaticClusterUpdateService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AutomaticClusterUpdateService.java new file mode 100755 index 000000000..73d68ffb8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AutomaticClusterUpdateService.java @@ -0,0 +1,35 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; + +/** + * Controls whether automatic cluster update is enabled for the current workspace. By default, it is + * turned off. + * + *

This is the high-level interface, that contains generated methods. + * + *

Evolving: this interface is under development. Method signatures may change. + */ +@Generated +public interface AutomaticClusterUpdateService { + /** + * Get the automatic cluster update setting. + * + *

Gets the automatic cluster update setting. + */ + AutomaticClusterUpdateSetting get( + GetAutomaticClusterUpdateSettingRequest getAutomaticClusterUpdateSettingRequest); + + /** + * Update the automatic cluster update setting. + * + *

Updates the automatic cluster update setting for the workspace. A fresh etag needs to be + * provided in `PATCH` requests (as part of the setting field). The etag can be retrieved by + * making a `GET` request before the `PATCH` request. If the setting is updated concurrently, + * `PATCH` fails with 409 and the request must be retried by using the fresh etag in the 409 + * response. + */ + AutomaticClusterUpdateSetting update( + UpdateAutomaticClusterUpdateSettingRequest updateAutomaticClusterUpdateSettingRequest); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkConnectivityConfigRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkConnectivityConfigRequest.java index 8096e39cb..dcec78377 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkConnectivityConfigRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CreateNetworkConnectivityConfigRequest.java @@ -18,8 +18,8 @@ public class CreateNetworkConnectivityConfigRequest { private String name; /** - * The Azure region for this network connectivity configuration. Only workspaces in the same Azure - * region can be attached to this network connectivity configuration. + * The region for the network connectivity configuration. Only workspaces in the same region can + * be attached to the network connectivity configuration. */ @JsonProperty("region") private String region; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAPI.java new file mode 100755 index 000000000..facaf8b3e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAPI.java @@ -0,0 +1,65 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+package com.databricks.sdk.service.settings; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Controls whether to enable the compliance security profile for the current workspace. Enabling it + * on a workspace is permanent. By default, it is turned off. + * + *

This settings can NOT be disabled once it is enabled. + */ +@Generated +public class CspEnablementAPI { + private static final Logger LOG = LoggerFactory.getLogger(CspEnablementAPI.class); + + private final CspEnablementService impl; + + /** Regular-use constructor */ + public CspEnablementAPI(ApiClient apiClient) { + impl = new CspEnablementImpl(apiClient); + } + + /** Constructor for mocks */ + public CspEnablementAPI(CspEnablementService mock) { + impl = mock; + } + + /** + * Get the compliance security profile setting. + * + *

Gets the compliance security profile setting. + */ + public CspEnablementSetting get(GetCspEnablementSettingRequest request) { + return impl.get(request); + } + + public CspEnablementSetting update( + boolean allowMissing, CspEnablementSetting setting, String fieldMask) { + return update( + new UpdateCspEnablementSettingRequest() + .setAllowMissing(allowMissing) + .setSetting(setting) + .setFieldMask(fieldMask)); + } + + /** + * Update the compliance security profile setting. + * + *

Updates the compliance security profile setting for the workspace. A fresh etag needs to be + * provided in `PATCH` requests (as part of the setting field). The etag can be retrieved by + * making a `GET` request before the `PATCH` request. If the setting is updated concurrently, + * `PATCH` fails with 409 and the request must be retried by using the fresh etag in the 409 + * response. + */ + public CspEnablementSetting update(UpdateCspEnablementSettingRequest request) { + return impl.update(request); + } + + public CspEnablementService impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountAPI.java new file mode 100755 index 000000000..99c33fd21 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountAPI.java @@ -0,0 +1,64 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * The compliance security profile settings at the account level control whether to enable it for + * new workspaces. By default, this account-level setting is disabled for new workspaces. After + * workspace creation, account admins can enable the compliance security profile individually for + * each workspace. + * + *

This settings can be disabled so that new workspaces do not have compliance security profile + * enabled by default. + */ +@Generated +public class CspEnablementAccountAPI { + private static final Logger LOG = LoggerFactory.getLogger(CspEnablementAccountAPI.class); + + private final CspEnablementAccountService impl; + + /** Regular-use constructor */ + public CspEnablementAccountAPI(ApiClient apiClient) { + impl = new CspEnablementAccountImpl(apiClient); + } + + /** Constructor for mocks */ + public CspEnablementAccountAPI(CspEnablementAccountService mock) { + impl = mock; + } + + /** + * Get the compliance security profile setting for new workspaces. + * + *

Gets the compliance security profile setting for new workspaces. + */ + public CspEnablementAccountSetting get(GetCspEnablementAccountSettingRequest request) { + return impl.get(request); + } + + public CspEnablementAccountSetting update( + boolean allowMissing, CspEnablementAccountSetting setting, String fieldMask) { + return update( + new UpdateCspEnablementAccountSettingRequest() + .setAllowMissing(allowMissing) + .setSetting(setting) + .setFieldMask(fieldMask)); + } + + /** + * Update the compliance security profile setting for new workspaces. + * + *

Updates the value of the compliance security profile setting for new workspaces. + */ + public CspEnablementAccountSetting update(UpdateCspEnablementAccountSettingRequest request) { + return impl.update(request); + } + + public CspEnablementAccountService impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountImpl.java new file mode 100755 index 000000000..ee1e5cbb8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountImpl.java @@ -0,0 +1,40 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import java.util.HashMap; +import java.util.Map; + +/** Package-local implementation of CspEnablementAccount */ +@Generated +class CspEnablementAccountImpl implements CspEnablementAccountService { + private final ApiClient apiClient; + + public CspEnablementAccountImpl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public CspEnablementAccountSetting get(GetCspEnablementAccountSettingRequest request) { + String path = + String.format( + "/api/2.0/accounts/%s/settings/types/shield_csp_enablement_ac/names/default", + apiClient.configuredAccountID()); + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + return apiClient.GET(path, request, CspEnablementAccountSetting.class, headers); + } + + @Override + public CspEnablementAccountSetting update(UpdateCspEnablementAccountSettingRequest request) { + String path = + String.format( + "/api/2.0/accounts/%s/settings/types/shield_csp_enablement_ac/names/default", + apiClient.configuredAccountID()); + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + 
headers.put("Content-Type", "application/json"); + return apiClient.PATCH(path, request, CspEnablementAccountSetting.class, headers); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountService.java new file mode 100755 index 000000000..09bbdf343 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementAccountService.java @@ -0,0 +1,36 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; + +/** + * The compliance security profile settings at the account level control whether to enable it for + * new workspaces. By default, this account-level setting is disabled for new workspaces. After + * workspace creation, account admins can enable the compliance security profile individually for + * each workspace. + * + *

This settings can be disabled so that new workspaces do not have compliance security profile + * enabled by default. + * + *

This is the high-level interface, that contains generated methods. + * + *

Evolving: this interface is under development. Method signatures may change. + */ +@Generated +public interface CspEnablementAccountService { + /** + * Get the compliance security profile setting for new workspaces. + * + *

Gets the compliance security profile setting for new workspaces. + */ + CspEnablementAccountSetting get( + GetCspEnablementAccountSettingRequest getCspEnablementAccountSettingRequest); + + /** + * Update the compliance security profile setting for new workspaces. + * + *

Updates the value of the compliance security profile setting for new workspaces. + */ + CspEnablementAccountSetting update( + UpdateCspEnablementAccountSettingRequest updateCspEnablementAccountSettingRequest); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementImpl.java new file mode 100755 index 000000000..2b8e77fec --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementImpl.java @@ -0,0 +1,34 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import java.util.HashMap; +import java.util.Map; + +/** Package-local implementation of CspEnablement */ +@Generated +class CspEnablementImpl implements CspEnablementService { + private final ApiClient apiClient; + + public CspEnablementImpl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public CspEnablementSetting get(GetCspEnablementSettingRequest request) { + String path = "/api/2.0/settings/types/shield_csp_enablement_ws_db/names/default"; + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + return apiClient.GET(path, request, CspEnablementSetting.class, headers); + } + + @Override + public CspEnablementSetting update(UpdateCspEnablementSettingRequest request) { + String path = "/api/2.0/settings/types/shield_csp_enablement_ws_db/names/default"; + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + headers.put("Content-Type", "application/json"); + return apiClient.PATCH(path, request, CspEnablementSetting.class, headers); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementService.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementService.java new file mode 100755 index 000000000..5f5da79fd --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/CspEnablementService.java @@ -0,0 +1,35 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; + +/** + * Controls whether to enable the compliance security profile for the current workspace. Enabling it + * on a workspace is permanent. By default, it is turned off. + * + *

This settings can NOT be disabled once it is enabled. + * + *

This is the high-level interface, that contains generated methods. + * + *

Evolving: this interface is under development. Method signatures may change. + */ +@Generated +public interface CspEnablementService { + /** + * Get the compliance security profile setting. + * + *

Gets the compliance security profile setting. + */ + CspEnablementSetting get(GetCspEnablementSettingRequest getCspEnablementSettingRequest); + + /** + * Update the compliance security profile setting. + * + *

Updates the compliance security profile setting for the workspace. A fresh etag needs to be + * provided in `PATCH` requests (as part of the setting field). The etag can be retrieved by + * making a `GET` request before the `PATCH` request. If the setting is updated concurrently, + * `PATCH` fails with 409 and the request must be retried by using the fresh etag in the 409 + * response. + */ + CspEnablementSetting update(UpdateCspEnablementSettingRequest updateCspEnablementSettingRequest); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultNamespaceAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultNamespaceAPI.java new file mode 100755 index 000000000..a221e76ea --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultNamespaceAPI.java @@ -0,0 +1,85 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * The default namespace setting API allows users to configure the default namespace for a + * Databricks workspace. + * + *

Through this API, users can retrieve, set, or modify the default namespace used when queries + * do not reference a fully qualified three-level name. For example, if you use the API to set + * 'retail_prod' as the default catalog, then a query 'SELECT * FROM myTable' would reference the + * object 'retail_prod.default.myTable' (the schema 'default' is always assumed). + * + *

This setting requires a restart of clusters and SQL warehouses to take effect. Additionally, + * the default namespace only applies when using Unity Catalog-enabled compute. + */ +@Generated +public class DefaultNamespaceAPI { + private static final Logger LOG = LoggerFactory.getLogger(DefaultNamespaceAPI.class); + + private final DefaultNamespaceService impl; + + /** Regular-use constructor */ + public DefaultNamespaceAPI(ApiClient apiClient) { + impl = new DefaultNamespaceImpl(apiClient); + } + + /** Constructor for mocks */ + public DefaultNamespaceAPI(DefaultNamespaceService mock) { + impl = mock; + } + + /** + * Delete the default namespace setting. + * + *

Deletes the default namespace setting for the workspace. A fresh etag needs to be provided + * in `DELETE` requests (as a query parameter). The etag can be retrieved by making a `GET` + * request before the `DELETE` request. If the setting is updated/deleted concurrently, `DELETE` + * fails with 409 and the request must be retried by using the fresh etag in the 409 response. + */ + public DeleteDefaultNamespaceSettingResponse delete( + DeleteDefaultNamespaceSettingRequest request) { + return impl.delete(request); + } + + /** + * Get the default namespace setting. + * + *

Gets the default namespace setting. + */ + public DefaultNamespaceSetting get(GetDefaultNamespaceSettingRequest request) { + return impl.get(request); + } + + public DefaultNamespaceSetting update( + boolean allowMissing, DefaultNamespaceSetting setting, String fieldMask) { + return update( + new UpdateDefaultNamespaceSettingRequest() + .setAllowMissing(allowMissing) + .setSetting(setting) + .setFieldMask(fieldMask)); + } + + /** + * Update the default namespace setting. + * + *

Updates the default namespace setting for the workspace. A fresh etag needs to be provided + * in `PATCH` requests (as part of the setting field). The etag can be retrieved by making a `GET` + * request before the `PATCH` request. Note that if the setting does not exist, `GET` returns a + * NOT_FOUND error and the etag is present in the error response, which should be set in the + * `PATCH` request. If the setting is updated concurrently, `PATCH` fails with 409 and the request + * must be retried by using the fresh etag in the 409 response. + */ + public DefaultNamespaceSetting update(UpdateDefaultNamespaceSettingRequest request) { + return impl.update(request); + } + + public DefaultNamespaceService impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultNamespaceImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultNamespaceImpl.java new file mode 100755 index 000000000..6912944f9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultNamespaceImpl.java @@ -0,0 +1,43 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+package com.databricks.sdk.service.settings; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import java.util.HashMap; +import java.util.Map; + +/** Package-local implementation of DefaultNamespace */ +@Generated +class DefaultNamespaceImpl implements DefaultNamespaceService { + private final ApiClient apiClient; + + public DefaultNamespaceImpl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public DeleteDefaultNamespaceSettingResponse delete( + DeleteDefaultNamespaceSettingRequest request) { + String path = "/api/2.0/settings/types/default_namespace_ws/names/default"; + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + return apiClient.DELETE(path, request, DeleteDefaultNamespaceSettingResponse.class, headers); + } + + @Override + public DefaultNamespaceSetting get(GetDefaultNamespaceSettingRequest request) { + String path = "/api/2.0/settings/types/default_namespace_ws/names/default"; + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + return apiClient.GET(path, request, DefaultNamespaceSetting.class, headers); + } + + @Override + public DefaultNamespaceSetting update(UpdateDefaultNamespaceSettingRequest request) { + String path = "/api/2.0/settings/types/default_namespace_ws/names/default"; + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + headers.put("Content-Type", "application/json"); + return apiClient.PATCH(path, request, DefaultNamespaceSetting.class, headers); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultNamespaceService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultNamespaceService.java new file mode 100755 index 000000000..7c7626ca2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultNamespaceService.java @@ -0,0 +1,54 @@ +// Code generated from OpenAPI specs by 
Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; + +/** + * The default namespace setting API allows users to configure the default namespace for a + * Databricks workspace. + * + *

Through this API, users can retrieve, set, or modify the default namespace used when queries + * do not reference a fully qualified three-level name. For example, if you use the API to set + * 'retail_prod' as the default catalog, then a query 'SELECT * FROM myTable' would reference the + * object 'retail_prod.default.myTable' (the schema 'default' is always assumed). + * + *

This setting requires a restart of clusters and SQL warehouses to take effect. Additionally, + * the default namespace only applies when using Unity Catalog-enabled compute. + * + *

This is the high-level interface, that contains generated methods. + * + *

Evolving: this interface is under development. Method signatures may change. + */ +@Generated +public interface DefaultNamespaceService { + /** + * Delete the default namespace setting. + * + *

Deletes the default namespace setting for the workspace. A fresh etag needs to be provided + * in `DELETE` requests (as a query parameter). The etag can be retrieved by making a `GET` + * request before the `DELETE` request. If the setting is updated/deleted concurrently, `DELETE` + * fails with 409 and the request must be retried by using the fresh etag in the 409 response. + */ + DeleteDefaultNamespaceSettingResponse delete( + DeleteDefaultNamespaceSettingRequest deleteDefaultNamespaceSettingRequest); + + /** + * Get the default namespace setting. + * + *

Gets the default namespace setting. + */ + DefaultNamespaceSetting get(GetDefaultNamespaceSettingRequest getDefaultNamespaceSettingRequest); + + /** + * Update the default namespace setting. + * + *

Updates the default namespace setting for the workspace. A fresh etag needs to be provided + * in `PATCH` requests (as part of the setting field). The etag can be retrieved by making a `GET` + * request before the `PATCH` request. Note that if the setting does not exist, `GET` returns a + * NOT_FOUND error and the etag is present in the error response, which should be set in the + * `PATCH` request. If the setting is updated concurrently, `PATCH` fails with 409 and the request + * must be retried by using the fresh etag in the 409 response. + */ + DefaultNamespaceSetting update( + UpdateDefaultNamespaceSettingRequest updateDefaultNamespaceSettingRequest); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementAPI.java new file mode 100755 index 000000000..fe01a2693 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementAPI.java @@ -0,0 +1,67 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Controls whether enhanced security monitoring is enabled for the current workspace. If the + * compliance security profile is enabled, this is automatically enabled. By default, it is + * disabled. However, if the compliance security profile is enabled, this is automatically enabled. + * + *

If the compliance security profile is disabled, you can enable or disable this setting and it + * is not permanent. + */ +@Generated +public class EsmEnablementAPI { + private static final Logger LOG = LoggerFactory.getLogger(EsmEnablementAPI.class); + + private final EsmEnablementService impl; + + /** Regular-use constructor */ + public EsmEnablementAPI(ApiClient apiClient) { + impl = new EsmEnablementImpl(apiClient); + } + + /** Constructor for mocks */ + public EsmEnablementAPI(EsmEnablementService mock) { + impl = mock; + } + + /** + * Get the enhanced security monitoring setting. + * + *

Gets the enhanced security monitoring setting. + */ + public EsmEnablementSetting get(GetEsmEnablementSettingRequest request) { + return impl.get(request); + } + + public EsmEnablementSetting update( + boolean allowMissing, EsmEnablementSetting setting, String fieldMask) { + return update( + new UpdateEsmEnablementSettingRequest() + .setAllowMissing(allowMissing) + .setSetting(setting) + .setFieldMask(fieldMask)); + } + + /** + * Update the enhanced security monitoring setting. + * + *

Updates the enhanced security monitoring setting for the workspace. A fresh etag needs to be + * provided in `PATCH` requests (as part of the setting field). The etag can be retrieved by + * making a `GET` request before the `PATCH` request. If the setting is updated concurrently, + * `PATCH` fails with 409 and the request must be retried by using the fresh etag in the 409 + * response. + */ + public EsmEnablementSetting update(UpdateEsmEnablementSettingRequest request) { + return impl.update(request); + } + + public EsmEnablementService impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementAccountAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementAccountAPI.java new file mode 100755 index 000000000..cec5ff9f0 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementAccountAPI.java @@ -0,0 +1,61 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * The enhanced security monitoring setting at the account level controls whether to enable the + * feature on new workspaces. By default, this account-level setting is disabled for new workspaces. + * After workspace creation, account admins can enable enhanced security monitoring individually for + * each workspace. 
+ */ +@Generated +public class EsmEnablementAccountAPI { + private static final Logger LOG = LoggerFactory.getLogger(EsmEnablementAccountAPI.class); + + private final EsmEnablementAccountService impl; + + /** Regular-use constructor */ + public EsmEnablementAccountAPI(ApiClient apiClient) { + impl = new EsmEnablementAccountImpl(apiClient); + } + + /** Constructor for mocks */ + public EsmEnablementAccountAPI(EsmEnablementAccountService mock) { + impl = mock; + } + + /** + * Get the enhanced security monitoring setting for new workspaces. + * + *

Gets the enhanced security monitoring setting for new workspaces. + */ + public EsmEnablementAccountSetting get(GetEsmEnablementAccountSettingRequest request) { + return impl.get(request); + } + + public EsmEnablementAccountSetting update( + boolean allowMissing, EsmEnablementAccountSetting setting, String fieldMask) { + return update( + new UpdateEsmEnablementAccountSettingRequest() + .setAllowMissing(allowMissing) + .setSetting(setting) + .setFieldMask(fieldMask)); + } + + /** + * Update the enhanced security monitoring setting for new workspaces. + * + *

Updates the value of the enhanced security monitoring setting for new workspaces. + */ + public EsmEnablementAccountSetting update(UpdateEsmEnablementAccountSettingRequest request) { + return impl.update(request); + } + + public EsmEnablementAccountService impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementAccountImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementAccountImpl.java new file mode 100755 index 000000000..6e3fb4adc --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementAccountImpl.java @@ -0,0 +1,40 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import java.util.HashMap; +import java.util.Map; + +/** Package-local implementation of EsmEnablementAccount */ +@Generated +class EsmEnablementAccountImpl implements EsmEnablementAccountService { + private final ApiClient apiClient; + + public EsmEnablementAccountImpl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public EsmEnablementAccountSetting get(GetEsmEnablementAccountSettingRequest request) { + String path = + String.format( + "/api/2.0/accounts/%s/settings/types/shield_esm_enablement_ac/names/default", + apiClient.configuredAccountID()); + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + return apiClient.GET(path, request, EsmEnablementAccountSetting.class, headers); + } + + @Override + public EsmEnablementAccountSetting update(UpdateEsmEnablementAccountSettingRequest request) { + String path = + String.format( + "/api/2.0/accounts/%s/settings/types/shield_esm_enablement_ac/names/default", + apiClient.configuredAccountID()); + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + 
headers.put("Content-Type", "application/json"); + return apiClient.PATCH(path, request, EsmEnablementAccountSetting.class, headers); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementAccountService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementAccountService.java new file mode 100755 index 000000000..51a33c99b --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementAccountService.java @@ -0,0 +1,33 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; + +/** + * The enhanced security monitoring setting at the account level controls whether to enable the + * feature on new workspaces. By default, this account-level setting is disabled for new workspaces. + * After workspace creation, account admins can enable enhanced security monitoring individually for + * each workspace. + * + *

This is the high-level interface, that contains generated methods. + * + *

Evolving: this interface is under development. Method signatures may change. + */ +@Generated +public interface EsmEnablementAccountService { + /** + * Get the enhanced security monitoring setting for new workspaces. + * + *

Gets the enhanced security monitoring setting for new workspaces. + */ + EsmEnablementAccountSetting get( + GetEsmEnablementAccountSettingRequest getEsmEnablementAccountSettingRequest); + + /** + * Update the enhanced security monitoring setting for new workspaces. + * + *

Updates the value of the enhanced security monitoring setting for new workspaces. + */ + EsmEnablementAccountSetting update( + UpdateEsmEnablementAccountSettingRequest updateEsmEnablementAccountSettingRequest); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementImpl.java new file mode 100755 index 000000000..f8cc5ba2f --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementImpl.java @@ -0,0 +1,34 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import java.util.HashMap; +import java.util.Map; + +/** Package-local implementation of EsmEnablement */ +@Generated +class EsmEnablementImpl implements EsmEnablementService { + private final ApiClient apiClient; + + public EsmEnablementImpl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public EsmEnablementSetting get(GetEsmEnablementSettingRequest request) { + String path = "/api/2.0/settings/types/shield_esm_enablement_ws_db/names/default"; + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + return apiClient.GET(path, request, EsmEnablementSetting.class, headers); + } + + @Override + public EsmEnablementSetting update(UpdateEsmEnablementSettingRequest request) { + String path = "/api/2.0/settings/types/shield_esm_enablement_ws_db/names/default"; + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + headers.put("Content-Type", "application/json"); + return apiClient.PATCH(path, request, EsmEnablementSetting.class, headers); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementService.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementService.java new file mode 100755 index 000000000..73c2ebfe3 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EsmEnablementService.java @@ -0,0 +1,37 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; + +/** + * Controls whether enhanced security monitoring is enabled for the current workspace. If the + * compliance security profile is enabled, this is automatically enabled. By default, it is + * disabled. However, if the compliance security profile is enabled, this is automatically enabled. + * + *

If the compliance security profile is disabled, you can enable or disable this setting and it + * is not permanent. + * + *

This is the high-level interface, that contains generated methods. + * + *

Evolving: this interface is under development. Method signatures may change. + */ +@Generated +public interface EsmEnablementService { + /** + * Get the enhanced security monitoring setting. + * + *

Gets the enhanced security monitoring setting. + */ + EsmEnablementSetting get(GetEsmEnablementSettingRequest getEsmEnablementSettingRequest); + + /** + * Update the enhanced security monitoring setting. + * + *

Updates the enhanced security monitoring setting for the workspace. A fresh etag needs to be + * provided in `PATCH` requests (as part of the setting field). The etag can be retrieved by + * making a `GET` request before the `PATCH` request. If the setting is updated concurrently, + * `PATCH` fails with 409 and the request must be retried by using the fresh etag in the 409 + * response. + */ + EsmEnablementSetting update(UpdateEsmEnablementSettingRequest updateEsmEnablementSettingRequest); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsAPI.java index b8a4c0ac0..d56ba378c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/IpAccessListsAPI.java @@ -3,6 +3,7 @@ import com.databricks.sdk.core.ApiClient; import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -101,7 +102,11 @@ public FetchIpAccessListResponse get(GetIpAccessListRequest request) { *

Gets all IP access lists for the specified workspace. */ public Iterable list() { - return impl.list().getIpAccessLists(); + return new Paginator<>( + null, + (Void v) -> impl.list(), + ListIpAccessListResponse::getIpAccessLists, + response -> null); } public void replace(String ipAccessListId, String label, ListType listType, boolean enabled) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAwsStableIpRule.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAwsStableIpRule.java new file mode 100755 index 000000000..d923e0b3e --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccAwsStableIpRule.java @@ -0,0 +1,50 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.ToStringer; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Collection; +import java.util.Objects; + +/** + * The stable AWS IP CIDR blocks. You can use these to configure the firewall of your resources to + * allow traffic from your Databricks workspace. + */ +@Generated +public class NccAwsStableIpRule { + /** + * The list of stable IP CIDR blocks from which Databricks network traffic originates when + * accessing your resources. 
+ */ + @JsonProperty("cidr_blocks") + private Collection cidrBlocks; + + public NccAwsStableIpRule setCidrBlocks(Collection cidrBlocks) { + this.cidrBlocks = cidrBlocks; + return this; + } + + public Collection getCidrBlocks() { + return cidrBlocks; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + NccAwsStableIpRule that = (NccAwsStableIpRule) o; + return Objects.equals(cidrBlocks, that.cidrBlocks); + } + + @Override + public int hashCode() { + return Objects.hash(cidrBlocks); + } + + @Override + public String toString() { + return new ToStringer(NccAwsStableIpRule.class).add("cidrBlocks", cidrBlocks).toString(); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressDefaultRules.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressDefaultRules.java index c48ba37df..8b697413b 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressDefaultRules.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NccEgressDefaultRules.java @@ -14,6 +14,13 @@ */ @Generated public class NccEgressDefaultRules { + /** + * The stable AWS IP CIDR blocks. You can use these to configure the firewall of your resources to + * allow traffic from your Databricks workspace. + */ + @JsonProperty("aws_stable_ip_rule") + private NccAwsStableIpRule awsStableIpRule; + /** * The stable Azure service endpoints. You can configure the firewall of your Azure resources to * allow traffic from your Databricks serverless compute resources. 
@@ -21,6 +28,15 @@ public class NccEgressDefaultRules { @JsonProperty("azure_service_endpoint_rule") private NccAzureServiceEndpointRule azureServiceEndpointRule; + public NccEgressDefaultRules setAwsStableIpRule(NccAwsStableIpRule awsStableIpRule) { + this.awsStableIpRule = awsStableIpRule; + return this; + } + + public NccAwsStableIpRule getAwsStableIpRule() { + return awsStableIpRule; + } + public NccEgressDefaultRules setAzureServiceEndpointRule( NccAzureServiceEndpointRule azureServiceEndpointRule) { this.azureServiceEndpointRule = azureServiceEndpointRule; @@ -36,17 +52,19 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; NccEgressDefaultRules that = (NccEgressDefaultRules) o; - return Objects.equals(azureServiceEndpointRule, that.azureServiceEndpointRule); + return Objects.equals(awsStableIpRule, that.awsStableIpRule) + && Objects.equals(azureServiceEndpointRule, that.azureServiceEndpointRule); } @Override public int hashCode() { - return Objects.hash(azureServiceEndpointRule); + return Objects.hash(awsStableIpRule, azureServiceEndpointRule); } @Override public String toString() { return new ToStringer(NccEgressDefaultRules.class) + .add("awsStableIpRule", awsStableIpRule) .add("azureServiceEndpointRule", azureServiceEndpointRule) .toString(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityAPI.java index 88175b208..e039d64d4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityAPI.java @@ -9,14 +9,7 @@ /** * These APIs provide configurations for the network connectivity of your workspaces for serverless - * compute resources. 
This API provides stable subnets for your workspace so that you can configure - * your firewalls on your Azure Storage accounts to allow access from Databricks. You can also use - * the API to provision private endpoints for Databricks to privately connect serverless compute - * resources to your Azure resources using Azure Private Link. See [configure serverless secure - * connectivity]. - * - *

[configure serverless secure connectivity]: - * https://learn.microsoft.com/azure/databricks/security/network/serverless-network-security + * compute resources. */ @Generated public class NetworkConnectivityAPI { @@ -40,22 +33,7 @@ public NetworkConnectivityConfiguration createNetworkConnectivityConfiguration( new CreateNetworkConnectivityConfigRequest().setName(name).setRegion(region)); } - /** - * Create a network connectivity configuration. - * - *

Creates a network connectivity configuration (NCC), which provides stable Azure service - * subnets when accessing your Azure Storage accounts. You can also use a network connectivity - * configuration to create Databricks-managed private endpoints so that Databricks serverless - * compute resources privately access your resources. - * - *

**IMPORTANT**: After you create the network connectivity configuration, you must assign one - * or more workspaces to the new network connectivity configuration. You can share one network - * connectivity configuration with multiple workspaces from the same Azure region within the same - * Databricks account. See [configure serverless secure connectivity]. - * - *

[configure serverless secure connectivity]: - * https://learn.microsoft.com/azure/databricks/security/network/serverless-network-security - */ + /** Create a network connectivity configuration. */ public NetworkConnectivityConfiguration createNetworkConnectivityConfiguration( CreateNetworkConnectivityConfigRequest request) { return impl.createNetworkConnectivityConfiguration(request); @@ -118,9 +96,10 @@ public NccAzurePrivateEndpointRule deletePrivateEndpointRule( /** * Delete a private endpoint rule. * - *

Initiates deleting a private endpoint rule. The private endpoint will be deactivated and - * will be purged after seven days of deactivation. When a private endpoint is in deactivated - * state, `deactivated` field is set to `true` and the private endpoint is not available to your + *

Initiates deleting a private endpoint rule. If the connection state is PENDING or EXPIRED, + * the private endpoint is immediately deleted. Otherwise, the private endpoint is deactivated and + * will be deleted after seven days of deactivation. When a private endpoint is deactivated, the + * `deactivated` field is set to `true` and the private endpoint is not available to your * serverless compute resources. */ public NccAzurePrivateEndpointRule deletePrivateEndpointRule( diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityConfiguration.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityConfiguration.java index 88c740562..398b70d30 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityConfiguration.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityConfiguration.java @@ -37,8 +37,8 @@ public class NetworkConnectivityConfiguration { private String networkConnectivityConfigId; /** - * The Azure region for this network connectivity configuration. Only workspaces in the same Azure - * region can be attached to this network connectivity configuration. + * The region for the network connectivity configuration. Only workspaces in the same region can + * be attached to the network connectivity configuration. 
*/ @JsonProperty("region") private String region; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityService.java index e5ed0e673..80ba453f5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/NetworkConnectivityService.java @@ -5,14 +5,7 @@ /** * These APIs provide configurations for the network connectivity of your workspaces for serverless - * compute resources. This API provides stable subnets for your workspace so that you can configure - * your firewalls on your Azure Storage accounts to allow access from Databricks. You can also use - * the API to provision private endpoints for Databricks to privately connect serverless compute - * resources to your Azure resources using Azure Private Link. See [configure serverless secure - * connectivity]. - * - *

[configure serverless secure connectivity]: - * https://learn.microsoft.com/azure/databricks/security/network/serverless-network-security + * compute resources. * *

This is the high-level interface, that contains generated methods. * @@ -20,22 +13,7 @@ */ @Generated public interface NetworkConnectivityService { - /** - * Create a network connectivity configuration. - * - *

Creates a network connectivity configuration (NCC), which provides stable Azure service - * subnets when accessing your Azure Storage accounts. You can also use a network connectivity - * configuration to create Databricks-managed private endpoints so that Databricks serverless - * compute resources privately access your resources. - * - *

**IMPORTANT**: After you create the network connectivity configuration, you must assign one - * or more workspaces to the new network connectivity configuration. You can share one network - * connectivity configuration with multiple workspaces from the same Azure region within the same - * Databricks account. See [configure serverless secure connectivity]. - * - *

[configure serverless secure connectivity]: - * https://learn.microsoft.com/azure/databricks/security/network/serverless-network-security - */ + /** Create a network connectivity configuration. */ NetworkConnectivityConfiguration createNetworkConnectivityConfiguration( CreateNetworkConnectivityConfigRequest createNetworkConnectivityConfigRequest); @@ -67,9 +45,10 @@ void deleteNetworkConnectivityConfiguration( /** * Delete a private endpoint rule. * - *

Initiates deleting a private endpoint rule. The private endpoint will be deactivated and - * will be purged after seven days of deactivation. When a private endpoint is in deactivated - * state, `deactivated` field is set to `true` and the private endpoint is not available to your + *

Initiates deleting a private endpoint rule. If the connection state is PENDING or EXPIRED, + * the private endpoint is immediately deleted. Otherwise, the private endpoint is deactivated and + * will be deleted after seven days of deactivation. When a private endpoint is deactivated, the + * `deactivated` field is set to `true` and the private endpoint is not available to your * serverless compute resources. */ NccAzurePrivateEndpointRule deletePrivateEndpointRule( diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalComputeAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalComputeAPI.java new file mode 100755 index 000000000..df86c53f9 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalComputeAPI.java @@ -0,0 +1,74 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * The Personal Compute enablement setting lets you control which users can use the Personal Compute + * default policy to create compute resources. By default all users in all workspaces have access + * (ON), but you can change the setting to instead let individual workspaces configure access + * control (DELEGATE). + * + *

There is only one instance of this setting per account. Since this setting has a default + * value, this setting is present on all accounts even though it's never set on a given account. + * Deletion reverts the value of the setting back to the default value. + */ +@Generated +public class PersonalComputeAPI { + private static final Logger LOG = LoggerFactory.getLogger(PersonalComputeAPI.class); + + private final PersonalComputeService impl; + + /** Regular-use constructor */ + public PersonalComputeAPI(ApiClient apiClient) { + impl = new PersonalComputeImpl(apiClient); + } + + /** Constructor for mocks */ + public PersonalComputeAPI(PersonalComputeService mock) { + impl = mock; + } + + /** + * Delete Personal Compute setting. + * + *

Reverts back the Personal Compute setting value to default (ON) + */ + public DeletePersonalComputeSettingResponse delete(DeletePersonalComputeSettingRequest request) { + return impl.delete(request); + } + + /** + * Get Personal Compute setting. + * + *

Gets the value of the Personal Compute setting. + */ + public PersonalComputeSetting get(GetPersonalComputeSettingRequest request) { + return impl.get(request); + } + + public PersonalComputeSetting update( + boolean allowMissing, PersonalComputeSetting setting, String fieldMask) { + return update( + new UpdatePersonalComputeSettingRequest() + .setAllowMissing(allowMissing) + .setSetting(setting) + .setFieldMask(fieldMask)); + } + + /** + * Update Personal Compute setting. + * + *

Updates the value of the Personal Compute setting. + */ + public PersonalComputeSetting update(UpdatePersonalComputeSettingRequest request) { + return impl.update(request); + } + + public PersonalComputeService impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalComputeImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalComputeImpl.java new file mode 100755 index 000000000..1e45ec4c6 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalComputeImpl.java @@ -0,0 +1,51 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import java.util.HashMap; +import java.util.Map; + +/** Package-local implementation of PersonalCompute */ +@Generated +class PersonalComputeImpl implements PersonalComputeService { + private final ApiClient apiClient; + + public PersonalComputeImpl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public DeletePersonalComputeSettingResponse delete(DeletePersonalComputeSettingRequest request) { + String path = + String.format( + "/api/2.0/accounts/%s/settings/types/dcp_acct_enable/names/default", + apiClient.configuredAccountID()); + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + return apiClient.DELETE(path, request, DeletePersonalComputeSettingResponse.class, headers); + } + + @Override + public PersonalComputeSetting get(GetPersonalComputeSettingRequest request) { + String path = + String.format( + "/api/2.0/accounts/%s/settings/types/dcp_acct_enable/names/default", + apiClient.configuredAccountID()); + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + return apiClient.GET(path, request, PersonalComputeSetting.class, headers); + } + + @Override + public 
PersonalComputeSetting update(UpdatePersonalComputeSettingRequest request) { + String path = + String.format( + "/api/2.0/accounts/%s/settings/types/dcp_acct_enable/names/default", + apiClient.configuredAccountID()); + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + headers.put("Content-Type", "application/json"); + return apiClient.PATCH(path, request, PersonalComputeSetting.class, headers); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalComputeService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalComputeService.java new file mode 100755 index 000000000..ba2942219 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/PersonalComputeService.java @@ -0,0 +1,44 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; + +/** + * The Personal Compute enablement setting lets you control which users can use the Personal Compute + * default policy to create compute resources. By default all users in all workspaces have access + * (ON), but you can change the setting to instead let individual workspaces configure access + * control (DELEGATE). + * + *

There is only one instance of this setting per account. Since this setting has a default + * value, this setting is present on all accounts even though it's never set on a given account. + * Deletion reverts the value of the setting back to the default value. + * + *

This is the high-level interface, that contains generated methods. + * + *

Evolving: this interface is under development. Method signatures may change. + */ +@Generated +public interface PersonalComputeService { + /** + * Delete Personal Compute setting. + * + *

Reverts back the Personal Compute setting value to default (ON) + */ + DeletePersonalComputeSettingResponse delete( + DeletePersonalComputeSettingRequest deletePersonalComputeSettingRequest); + + /** + * Get Personal Compute setting. + * + *

Gets the value of the Personal Compute setting. + */ + PersonalComputeSetting get(GetPersonalComputeSettingRequest getPersonalComputeSettingRequest); + + /** + * Update Personal Compute setting. + * + *

Updates the value of the Personal Compute setting. + */ + PersonalComputeSetting update( + UpdatePersonalComputeSettingRequest updatePersonalComputeSettingRequest); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RestrictWorkspaceAdminsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RestrictWorkspaceAdminsAPI.java new file mode 100755 index 000000000..1baad48a2 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RestrictWorkspaceAdminsAPI.java @@ -0,0 +1,86 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * The Restrict Workspace Admins setting lets you control the capabilities of workspace admins. With + * the setting status set to ALLOW_ALL, workspace admins can create service principal personal + * access tokens on behalf of any service principal in their workspace. Workspace admins can also + * change a job owner to any user in their workspace. And they can change the job run_as setting to + * any user in their workspace or to a service principal on which they have the Service Principal + * User role. With the setting status set to RESTRICT_TOKENS_AND_JOB_RUN_AS, workspace admins can + * only create personal access tokens on behalf of service principals they have the Service + * Principal User role on. They can also only change a job owner to themselves. And they can change + * the job run_as setting to themselves or to a service principal on which they have the Service + * Principal User role. 
+ */ +@Generated +public class RestrictWorkspaceAdminsAPI { + private static final Logger LOG = LoggerFactory.getLogger(RestrictWorkspaceAdminsAPI.class); + + private final RestrictWorkspaceAdminsService impl; + + /** Regular-use constructor */ + public RestrictWorkspaceAdminsAPI(ApiClient apiClient) { + impl = new RestrictWorkspaceAdminsImpl(apiClient); + } + + /** Constructor for mocks */ + public RestrictWorkspaceAdminsAPI(RestrictWorkspaceAdminsService mock) { + impl = mock; + } + + /** + * Delete the restrict workspace admins setting. + * + *

Reverts the restrict workspace admins setting status for the workspace. A fresh etag needs + * to be provided in `DELETE` requests (as a query parameter). The etag can be retrieved by making + * a `GET` request before the DELETE request. If the setting is updated/deleted concurrently, + * `DELETE` fails with 409 and the request must be retried by using the fresh etag in the 409 + * response. + */ + public DeleteRestrictWorkspaceAdminsSettingResponse delete( + DeleteRestrictWorkspaceAdminsSettingRequest request) { + return impl.delete(request); + } + + /** + * Get the restrict workspace admins setting. + * + *

Gets the restrict workspace admins setting. + */ + public RestrictWorkspaceAdminsSetting get(GetRestrictWorkspaceAdminsSettingRequest request) { + return impl.get(request); + } + + public RestrictWorkspaceAdminsSetting update( + boolean allowMissing, RestrictWorkspaceAdminsSetting setting, String fieldMask) { + return update( + new UpdateRestrictWorkspaceAdminsSettingRequest() + .setAllowMissing(allowMissing) + .setSetting(setting) + .setFieldMask(fieldMask)); + } + + /** + * Update the restrict workspace admins setting. + * + *

Updates the restrict workspace admins setting for the workspace. A fresh etag needs to be + * provided in `PATCH` requests (as part of the setting field). The etag can be retrieved by + * making a GET request before the `PATCH` request. If the setting is updated concurrently, + * `PATCH` fails with 409 and the request must be retried by using the fresh etag in the 409 + * response. + */ + public RestrictWorkspaceAdminsSetting update( + UpdateRestrictWorkspaceAdminsSettingRequest request) { + return impl.update(request); + } + + public RestrictWorkspaceAdminsService impl() { + return impl; + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RestrictWorkspaceAdminsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RestrictWorkspaceAdminsImpl.java new file mode 100755 index 000000000..10cafe064 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RestrictWorkspaceAdminsImpl.java @@ -0,0 +1,45 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. 
+package com.databricks.sdk.service.settings; + +import com.databricks.sdk.core.ApiClient; +import com.databricks.sdk.support.Generated; +import java.util.HashMap; +import java.util.Map; + +/** Package-local implementation of RestrictWorkspaceAdmins */ +@Generated +class RestrictWorkspaceAdminsImpl implements RestrictWorkspaceAdminsService { + private final ApiClient apiClient; + + public RestrictWorkspaceAdminsImpl(ApiClient apiClient) { + this.apiClient = apiClient; + } + + @Override + public DeleteRestrictWorkspaceAdminsSettingResponse delete( + DeleteRestrictWorkspaceAdminsSettingRequest request) { + String path = "/api/2.0/settings/types/restrict_workspace_admins/names/default"; + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + return apiClient.DELETE( + path, request, DeleteRestrictWorkspaceAdminsSettingResponse.class, headers); + } + + @Override + public RestrictWorkspaceAdminsSetting get(GetRestrictWorkspaceAdminsSettingRequest request) { + String path = "/api/2.0/settings/types/restrict_workspace_admins/names/default"; + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + return apiClient.GET(path, request, RestrictWorkspaceAdminsSetting.class, headers); + } + + @Override + public RestrictWorkspaceAdminsSetting update( + UpdateRestrictWorkspaceAdminsSettingRequest request) { + String path = "/api/2.0/settings/types/restrict_workspace_admins/names/default"; + Map headers = new HashMap<>(); + headers.put("Accept", "application/json"); + headers.put("Content-Type", "application/json"); + return apiClient.PATCH(path, request, RestrictWorkspaceAdminsSetting.class, headers); + } +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RestrictWorkspaceAdminsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RestrictWorkspaceAdminsService.java new file mode 100755 index 000000000..6e1fcd475 --- /dev/null +++ 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/RestrictWorkspaceAdminsService.java @@ -0,0 +1,55 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. +package com.databricks.sdk.service.settings; + +import com.databricks.sdk.support.Generated; + +/** + * The Restrict Workspace Admins setting lets you control the capabilities of workspace admins. With + * the setting status set to ALLOW_ALL, workspace admins can create service principal personal + * access tokens on behalf of any service principal in their workspace. Workspace admins can also + * change a job owner to any user in their workspace. And they can change the job run_as setting to + * any user in their workspace or to a service principal on which they have the Service Principal + * User role. With the setting status set to RESTRICT_TOKENS_AND_JOB_RUN_AS, workspace admins can + * only create personal access tokens on behalf of service principals they have the Service + * Principal User role on. They can also only change a job owner to themselves. And they can change + * the job run_as setting to themselves or to a service principal on which they have the Service + * Principal User role. + * + *

This is the high-level interface, that contains generated methods. + * + *

Evolving: this interface is under development. Method signatures may change. + */ +@Generated +public interface RestrictWorkspaceAdminsService { + /** + * Delete the restrict workspace admins setting. + * + *

Reverts the restrict workspace admins setting status for the workspace. A fresh etag needs + * to be provided in `DELETE` requests (as a query parameter). The etag can be retrieved by making + * a `GET` request before the DELETE request. If the setting is updated/deleted concurrently, + * `DELETE` fails with 409 and the request must be retried by using the fresh etag in the 409 + * response. + */ + DeleteRestrictWorkspaceAdminsSettingResponse delete( + DeleteRestrictWorkspaceAdminsSettingRequest deleteRestrictWorkspaceAdminsSettingRequest); + + /** + * Get the restrict workspace admins setting. + * + *

Gets the restrict workspace admins setting. + */ + RestrictWorkspaceAdminsSetting get( + GetRestrictWorkspaceAdminsSettingRequest getRestrictWorkspaceAdminsSettingRequest); + + /** + * Update the restrict workspace admins setting. + * + *

Updates the restrict workspace admins setting for the workspace. A fresh etag needs to be + * provided in `PATCH` requests (as part of the setting field). The etag can be retrieved by + * making a GET request before the `PATCH` request. If the setting is updated concurrently, + * `PATCH` fails with 409 and the request must be retried by using the fresh etag in the 409 + * response. + */ + RestrictWorkspaceAdminsSetting update( + UpdateRestrictWorkspaceAdminsSettingRequest updateRestrictWorkspaceAdminsSettingRequest); +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java index 0b7ba1547..aa15e0026 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java @@ -6,223 +6,71 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -/** - * The default namespace setting API allows users to configure the default namespace for a - * Databricks workspace. - * - *

Through this API, users can retrieve, set, or modify the default namespace used when queries - * do not reference a fully qualified three-level name. For example, if you use the API to set - * 'retail_prod' as the default catalog, then a query 'SELECT * FROM myTable' would reference the - * object 'retail_prod.default.myTable' (the schema 'default' is always assumed). - * - *

This setting requires a restart of clusters and SQL warehouses to take effect. Additionally, - * the default namespace only applies when using Unity Catalog-enabled compute. - */ +/** Workspace Settings API allows users to manage settings at the workspace level. */ @Generated public class SettingsAPI { private static final Logger LOG = LoggerFactory.getLogger(SettingsAPI.class); private final SettingsService impl; - /** Regular-use constructor */ - public SettingsAPI(ApiClient apiClient) { - impl = new SettingsImpl(apiClient); - } + private AutomaticClusterUpdateAPI automaticClusterUpdateAPI; - /** Constructor for mocks */ - public SettingsAPI(SettingsService mock) { - impl = mock; - } + private CspEnablementAPI cspEnablementAPI; - /** - * Delete the default namespace setting. - * - *

Deletes the default namespace setting for the workspace. A fresh etag needs to be provided - * in `DELETE` requests (as a query parameter). The etag can be retrieved by making a `GET` - * request before the `DELETE` request. If the setting is updated/deleted concurrently, `DELETE` - * fails with 409 and the request must be retried by using the fresh etag in the 409 response. - */ - public DeleteDefaultNamespaceSettingResponse deleteDefaultNamespaceSetting( - DeleteDefaultNamespaceSettingRequest request) { - return impl.deleteDefaultNamespaceSetting(request); - } + private DefaultNamespaceAPI defaultNamespaceAPI; - /** - * Delete the restrict workspace admins setting. - * - *

Reverts the restrict workspace admins setting status for the workspace. A fresh etag needs - * to be provided in `DELETE` requests (as a query parameter). The etag can be retrieved by making - * a `GET` request before the DELETE request. If the setting is updated/deleted concurrently, - * `DELETE` fails with 409 and the request must be retried by using the fresh etag in the 409 - * response. - */ - public DeleteRestrictWorkspaceAdminsSettingResponse deleteRestrictWorkspaceAdminsSetting( - DeleteRestrictWorkspaceAdminsSettingRequest request) { - return impl.deleteRestrictWorkspaceAdminsSetting(request); - } - - /** - * Get the automatic cluster update setting. - * - *

Gets the automatic cluster update setting. - */ - public AutomaticClusterUpdateSetting getAutomaticClusterUpdateSetting( - GetAutomaticClusterUpdateSettingRequest request) { - return impl.getAutomaticClusterUpdateSetting(request); - } + private EsmEnablementAPI esmEnablementAPI; - /** - * Get the compliance security profile setting. - * - *

Gets the compliance security profile setting. - */ - public CspEnablementSetting getCspEnablementSetting(GetCspEnablementSettingRequest request) { - return impl.getCspEnablementSetting(request); - } - - /** - * Get the default namespace setting. - * - *

Gets the default namespace setting. - */ - public DefaultNamespaceSetting getDefaultNamespaceSetting( - GetDefaultNamespaceSettingRequest request) { - return impl.getDefaultNamespaceSetting(request); - } + private RestrictWorkspaceAdminsAPI restrictWorkspaceAdminsAPI; - /** - * Get the enhanced security monitoring setting. - * - *

Gets the enhanced security monitoring setting. - */ - public EsmEnablementSetting getEsmEnablementSetting(GetEsmEnablementSettingRequest request) { - return impl.getEsmEnablementSetting(request); - } + /** Regular-use constructor */ + public SettingsAPI(ApiClient apiClient) { + impl = new SettingsImpl(apiClient); - /** - * Get the restrict workspace admins setting. - * - *

Gets the restrict workspace admins setting. - */ - public RestrictWorkspaceAdminsSetting getRestrictWorkspaceAdminsSetting( - GetRestrictWorkspaceAdminsSettingRequest request) { - return impl.getRestrictWorkspaceAdminsSetting(request); - } + automaticClusterUpdateAPI = new AutomaticClusterUpdateAPI(apiClient); - public AutomaticClusterUpdateSetting updateAutomaticClusterUpdateSetting( - boolean allowMissing, AutomaticClusterUpdateSetting setting, String fieldMask) { - return updateAutomaticClusterUpdateSetting( - new UpdateAutomaticClusterUpdateSettingRequest() - .setAllowMissing(allowMissing) - .setSetting(setting) - .setFieldMask(fieldMask)); - } + cspEnablementAPI = new CspEnablementAPI(apiClient); - /** - * Update the automatic cluster update setting. - * - *

Updates the automatic cluster update setting for the workspace. A fresh etag needs to be - * provided in `PATCH` requests (as part of the setting field). The etag can be retrieved by - * making a `GET` request before the `PATCH` request. If the setting is updated concurrently, - * `PATCH` fails with 409 and the request must be retried by using the fresh etag in the 409 - * response. - */ - public AutomaticClusterUpdateSetting updateAutomaticClusterUpdateSetting( - UpdateAutomaticClusterUpdateSettingRequest request) { - return impl.updateAutomaticClusterUpdateSetting(request); - } + defaultNamespaceAPI = new DefaultNamespaceAPI(apiClient); - public CspEnablementSetting updateCspEnablementSetting( - boolean allowMissing, CspEnablementSetting setting, String fieldMask) { - return updateCspEnablementSetting( - new UpdateCspEnablementSettingRequest() - .setAllowMissing(allowMissing) - .setSetting(setting) - .setFieldMask(fieldMask)); - } + esmEnablementAPI = new EsmEnablementAPI(apiClient); - /** - * Update the compliance security profile setting. - * - *

Updates the compliance security profile setting for the workspace. A fresh etag needs to be - * provided in `PATCH` requests (as part of the setting field). The etag can be retrieved by - * making a `GET` request before the `PATCH` request. If the setting is updated concurrently, - * `PATCH` fails with 409 and the request must be retried by using the fresh etag in the 409 - * response. - */ - public CspEnablementSetting updateCspEnablementSetting( - UpdateCspEnablementSettingRequest request) { - return impl.updateCspEnablementSetting(request); + restrictWorkspaceAdminsAPI = new RestrictWorkspaceAdminsAPI(apiClient); } - public DefaultNamespaceSetting updateDefaultNamespaceSetting( - boolean allowMissing, DefaultNamespaceSetting setting, String fieldMask) { - return updateDefaultNamespaceSetting( - new UpdateDefaultNamespaceSettingRequest() - .setAllowMissing(allowMissing) - .setSetting(setting) - .setFieldMask(fieldMask)); + /** Constructor for mocks */ + public SettingsAPI(SettingsService mock) { + impl = mock; } - /** - * Update the default namespace setting. - * - *

Updates the default namespace setting for the workspace. A fresh etag needs to be provided - * in `PATCH` requests (as part of the setting field). The etag can be retrieved by making a `GET` - * request before the `PATCH` request. Note that if the setting does not exist, `GET` returns a - * NOT_FOUND error and the etag is present in the error response, which should be set in the - * `PATCH` request. If the setting is updated concurrently, `PATCH` fails with 409 and the request - * must be retried by using the fresh etag in the 409 response. - */ - public DefaultNamespaceSetting updateDefaultNamespaceSetting( - UpdateDefaultNamespaceSettingRequest request) { - return impl.updateDefaultNamespaceSetting(request); + /** Controls whether automatic cluster update is enabled for the current workspace. */ + public AutomaticClusterUpdateAPI AutomaticClusterUpdate() { + return automaticClusterUpdateAPI; } - public EsmEnablementSetting updateEsmEnablementSetting( - boolean allowMissing, EsmEnablementSetting setting, String fieldMask) { - return updateEsmEnablementSetting( - new UpdateEsmEnablementSettingRequest() - .setAllowMissing(allowMissing) - .setSetting(setting) - .setFieldMask(fieldMask)); + /** Controls whether to enable the compliance security profile for the current workspace. */ + public CspEnablementAPI CspEnablement() { + return cspEnablementAPI; } /** - * Update the enhanced security monitoring setting. - * - *

Updates the enhanced security monitoring setting for the workspace. A fresh etag needs to be - * provided in `PATCH` requests (as part of the setting field). The etag can be retrieved by - * making a `GET` request before the `PATCH` request. If the setting is updated concurrently, - * `PATCH` fails with 409 and the request must be retried by using the fresh etag in the 409 - * response. + * The default namespace setting API allows users to configure the default namespace for a + * Databricks workspace. */ - public EsmEnablementSetting updateEsmEnablementSetting( - UpdateEsmEnablementSettingRequest request) { - return impl.updateEsmEnablementSetting(request); + public DefaultNamespaceAPI DefaultNamespace() { + return defaultNamespaceAPI; } - public RestrictWorkspaceAdminsSetting updateRestrictWorkspaceAdminsSetting( - boolean allowMissing, RestrictWorkspaceAdminsSetting setting, String fieldMask) { - return updateRestrictWorkspaceAdminsSetting( - new UpdateRestrictWorkspaceAdminsSettingRequest() - .setAllowMissing(allowMissing) - .setSetting(setting) - .setFieldMask(fieldMask)); + /** Controls whether enhanced security monitoring is enabled for the current workspace. */ + public EsmEnablementAPI EsmEnablement() { + return esmEnablementAPI; } /** - * Update the restrict workspace admins setting. - * - *

Updates the restrict workspace admins setting for the workspace. A fresh etag needs to be - * provided in `PATCH` requests (as part of the setting field). The etag can be retrieved by - * making a GET request before the `PATCH` request. If the setting is updated concurrently, - * `PATCH` fails with 409 and the request must be retried by using the fresh etag in the 409 - * response. + * The Restrict Workspace Admins setting lets you control the capabilities of workspace admins. */ - public RestrictWorkspaceAdminsSetting updateRestrictWorkspaceAdminsSetting( - UpdateRestrictWorkspaceAdminsSettingRequest request) { - return impl.updateRestrictWorkspaceAdminsSetting(request); + public RestrictWorkspaceAdminsAPI RestrictWorkspaceAdmins() { + return restrictWorkspaceAdminsAPI; } public SettingsService impl() { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsImpl.java index 1a0d2f522..5f5740dd1 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsImpl.java @@ -3,8 +3,6 @@ import com.databricks.sdk.core.ApiClient; import com.databricks.sdk.support.Generated; -import java.util.HashMap; -import java.util.Map; /** Package-local implementation of Settings */ @Generated @@ -14,116 +12,4 @@ class SettingsImpl implements SettingsService { public SettingsImpl(ApiClient apiClient) { this.apiClient = apiClient; } - - @Override - public DeleteDefaultNamespaceSettingResponse deleteDefaultNamespaceSetting( - DeleteDefaultNamespaceSettingRequest request) { - String path = "/api/2.0/settings/types/default_namespace_ws/names/default"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.DELETE(path, request, DeleteDefaultNamespaceSettingResponse.class, headers); - } - - @Override - 
public DeleteRestrictWorkspaceAdminsSettingResponse deleteRestrictWorkspaceAdminsSetting( - DeleteRestrictWorkspaceAdminsSettingRequest request) { - String path = "/api/2.0/settings/types/restrict_workspace_admins/names/default"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.DELETE( - path, request, DeleteRestrictWorkspaceAdminsSettingResponse.class, headers); - } - - @Override - public AutomaticClusterUpdateSetting getAutomaticClusterUpdateSetting( - GetAutomaticClusterUpdateSettingRequest request) { - String path = "/api/2.0/settings/types/automatic_cluster_update/names/default"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, AutomaticClusterUpdateSetting.class, headers); - } - - @Override - public CspEnablementSetting getCspEnablementSetting(GetCspEnablementSettingRequest request) { - String path = "/api/2.0/settings/types/shield_csp_enablement_ws_db/names/default"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, CspEnablementSetting.class, headers); - } - - @Override - public DefaultNamespaceSetting getDefaultNamespaceSetting( - GetDefaultNamespaceSettingRequest request) { - String path = "/api/2.0/settings/types/default_namespace_ws/names/default"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, DefaultNamespaceSetting.class, headers); - } - - @Override - public EsmEnablementSetting getEsmEnablementSetting(GetEsmEnablementSettingRequest request) { - String path = "/api/2.0/settings/types/shield_esm_enablement_ws_db/names/default"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, EsmEnablementSetting.class, headers); - } - - @Override - public RestrictWorkspaceAdminsSetting getRestrictWorkspaceAdminsSetting( - 
GetRestrictWorkspaceAdminsSettingRequest request) { - String path = "/api/2.0/settings/types/restrict_workspace_admins/names/default"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - return apiClient.GET(path, request, RestrictWorkspaceAdminsSetting.class, headers); - } - - @Override - public AutomaticClusterUpdateSetting updateAutomaticClusterUpdateSetting( - UpdateAutomaticClusterUpdateSettingRequest request) { - String path = "/api/2.0/settings/types/automatic_cluster_update/names/default"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PATCH(path, request, AutomaticClusterUpdateSetting.class, headers); - } - - @Override - public CspEnablementSetting updateCspEnablementSetting( - UpdateCspEnablementSettingRequest request) { - String path = "/api/2.0/settings/types/shield_csp_enablement_ws_db/names/default"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PATCH(path, request, CspEnablementSetting.class, headers); - } - - @Override - public DefaultNamespaceSetting updateDefaultNamespaceSetting( - UpdateDefaultNamespaceSettingRequest request) { - String path = "/api/2.0/settings/types/default_namespace_ws/names/default"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PATCH(path, request, DefaultNamespaceSetting.class, headers); - } - - @Override - public EsmEnablementSetting updateEsmEnablementSetting( - UpdateEsmEnablementSettingRequest request) { - String path = "/api/2.0/settings/types/shield_esm_enablement_ws_db/names/default"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PATCH(path, request, EsmEnablementSetting.class, headers); - 
} - - @Override - public RestrictWorkspaceAdminsSetting updateRestrictWorkspaceAdminsSetting( - UpdateRestrictWorkspaceAdminsSettingRequest request) { - String path = "/api/2.0/settings/types/restrict_workspace_admins/names/default"; - Map headers = new HashMap<>(); - headers.put("Accept", "application/json"); - headers.put("Content-Type", "application/json"); - return apiClient.PATCH(path, request, RestrictWorkspaceAdminsSetting.class, headers); - } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsService.java index 427bee819..2d5c8d50e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsService.java @@ -4,144 +4,11 @@ import com.databricks.sdk.support.Generated; /** - * The default namespace setting API allows users to configure the default namespace for a - * Databricks workspace. - * - *

Through this API, users can retrieve, set, or modify the default namespace used when queries - * do not reference a fully qualified three-level name. For example, if you use the API to set - * 'retail_prod' as the default catalog, then a query 'SELECT * FROM myTable' would reference the - * object 'retail_prod.default.myTable' (the schema 'default' is always assumed). - * - *

This setting requires a restart of clusters and SQL warehouses to take effect. Additionally, - * the default namespace only applies when using Unity Catalog-enabled compute. + * Workspace Settings API allows users to manage settings at the workspace level. * *

This is the high-level interface, that contains generated methods. * *

Evolving: this interface is under development. Method signatures may change. */ @Generated -public interface SettingsService { - /** - * Delete the default namespace setting. - * - *

Deletes the default namespace setting for the workspace. A fresh etag needs to be provided - * in `DELETE` requests (as a query parameter). The etag can be retrieved by making a `GET` - * request before the `DELETE` request. If the setting is updated/deleted concurrently, `DELETE` - * fails with 409 and the request must be retried by using the fresh etag in the 409 response. - */ - DeleteDefaultNamespaceSettingResponse deleteDefaultNamespaceSetting( - DeleteDefaultNamespaceSettingRequest deleteDefaultNamespaceSettingRequest); - - /** - * Delete the restrict workspace admins setting. - * - *

Reverts the restrict workspace admins setting status for the workspace. A fresh etag needs - * to be provided in `DELETE` requests (as a query parameter). The etag can be retrieved by making - * a `GET` request before the DELETE request. If the setting is updated/deleted concurrently, - * `DELETE` fails with 409 and the request must be retried by using the fresh etag in the 409 - * response. - */ - DeleteRestrictWorkspaceAdminsSettingResponse deleteRestrictWorkspaceAdminsSetting( - DeleteRestrictWorkspaceAdminsSettingRequest deleteRestrictWorkspaceAdminsSettingRequest); - - /** - * Get the automatic cluster update setting. - * - *

Gets the automatic cluster update setting. - */ - AutomaticClusterUpdateSetting getAutomaticClusterUpdateSetting( - GetAutomaticClusterUpdateSettingRequest getAutomaticClusterUpdateSettingRequest); - - /** - * Get the compliance security profile setting. - * - *

Gets the compliance security profile setting. - */ - CspEnablementSetting getCspEnablementSetting( - GetCspEnablementSettingRequest getCspEnablementSettingRequest); - - /** - * Get the default namespace setting. - * - *

Gets the default namespace setting. - */ - DefaultNamespaceSetting getDefaultNamespaceSetting( - GetDefaultNamespaceSettingRequest getDefaultNamespaceSettingRequest); - - /** - * Get the enhanced security monitoring setting. - * - *

Gets the enhanced security monitoring setting. - */ - EsmEnablementSetting getEsmEnablementSetting( - GetEsmEnablementSettingRequest getEsmEnablementSettingRequest); - - /** - * Get the restrict workspace admins setting. - * - *

Gets the restrict workspace admins setting. - */ - RestrictWorkspaceAdminsSetting getRestrictWorkspaceAdminsSetting( - GetRestrictWorkspaceAdminsSettingRequest getRestrictWorkspaceAdminsSettingRequest); - - /** - * Update the automatic cluster update setting. - * - *

Updates the automatic cluster update setting for the workspace. A fresh etag needs to be - * provided in `PATCH` requests (as part of the setting field). The etag can be retrieved by - * making a `GET` request before the `PATCH` request. If the setting is updated concurrently, - * `PATCH` fails with 409 and the request must be retried by using the fresh etag in the 409 - * response. - */ - AutomaticClusterUpdateSetting updateAutomaticClusterUpdateSetting( - UpdateAutomaticClusterUpdateSettingRequest updateAutomaticClusterUpdateSettingRequest); - - /** - * Update the compliance security profile setting. - * - *

Updates the compliance security profile setting for the workspace. A fresh etag needs to be - * provided in `PATCH` requests (as part of the setting field). The etag can be retrieved by - * making a `GET` request before the `PATCH` request. If the setting is updated concurrently, - * `PATCH` fails with 409 and the request must be retried by using the fresh etag in the 409 - * response. - */ - CspEnablementSetting updateCspEnablementSetting( - UpdateCspEnablementSettingRequest updateCspEnablementSettingRequest); - - /** - * Update the default namespace setting. - * - *

Updates the default namespace setting for the workspace. A fresh etag needs to be provided - * in `PATCH` requests (as part of the setting field). The etag can be retrieved by making a `GET` - * request before the `PATCH` request. Note that if the setting does not exist, `GET` returns a - * NOT_FOUND error and the etag is present in the error response, which should be set in the - * `PATCH` request. If the setting is updated concurrently, `PATCH` fails with 409 and the request - * must be retried by using the fresh etag in the 409 response. - */ - DefaultNamespaceSetting updateDefaultNamespaceSetting( - UpdateDefaultNamespaceSettingRequest updateDefaultNamespaceSettingRequest); - - /** - * Update the enhanced security monitoring setting. - * - *

Updates the enhanced security monitoring setting for the workspace. A fresh etag needs to be - * provided in `PATCH` requests (as part of the setting field). The etag can be retrieved by - * making a `GET` request before the `PATCH` request. If the setting is updated concurrently, - * `PATCH` fails with 409 and the request must be retried by using the fresh etag in the 409 - * response. - */ - EsmEnablementSetting updateEsmEnablementSetting( - UpdateEsmEnablementSettingRequest updateEsmEnablementSettingRequest); - - /** - * Update the restrict workspace admins setting. - * - *

Updates the restrict workspace admins setting for the workspace. A fresh etag needs to be - * provided in `PATCH` requests (as part of the setting field). The etag can be retrieved by - * making a GET request before the `PATCH` request. If the setting is updated concurrently, - * `PATCH` fails with 409 and the request must be retried by using the fresh etag in the 409 - * response. - */ - RestrictWorkspaceAdminsSetting updateRestrictWorkspaceAdminsSetting( - UpdateRestrictWorkspaceAdminsSettingRequest updateRestrictWorkspaceAdminsSettingRequest); -} +public interface SettingsService {} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementAPI.java index 965f2e1c8..f105eef54 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenManagementAPI.java @@ -3,6 +3,7 @@ import com.databricks.sdk.core.ApiClient; import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -89,7 +90,8 @@ public TokenPermissions getPermissions() { *

Lists all tokens associated with the specified workspace or user. */ public Iterable list(ListTokenManagementRequest request) { - return impl.list(request).getTokenInfos(); + return new Paginator<>( + request, impl::list, ListTokensResponse::getTokenInfos, response -> null); } /** diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensAPI.java index 8058491e5..070f94996 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokensAPI.java @@ -3,6 +3,7 @@ import com.databricks.sdk.core.ApiClient; import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -59,7 +60,8 @@ public void delete(RevokeTokenRequest request) { *

Lists all the valid tokens for a user-workspace pair. */ public Iterable list() { - return impl.list().getTokenInfos(); + return new Paginator<>( + null, (Void v) -> impl.list(), ListPublicTokensResponse::getTokenInfos, response -> null); } public TokensService impl() { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/Privilege.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/Privilege.java index b0e9d7f55..87d99f5c9 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/Privilege.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/Privilege.java @@ -6,6 +6,7 @@ @Generated public enum Privilege { + ACCESS, ALL_PRIVILEGES, APPLY_TAG, CREATE, @@ -22,6 +23,7 @@ public enum Privilege { CREATE_PROVIDER, CREATE_RECIPIENT, CREATE_SCHEMA, + CREATE_SERVICE_CREDENTIAL, CREATE_SHARE, CREATE_STORAGE_CREDENTIAL, CREATE_TABLE, diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ProvidersAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ProvidersAPI.java index f24a120f5..84860b413 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ProvidersAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ProvidersAPI.java @@ -3,6 +3,7 @@ import com.databricks.sdk.core.ApiClient; import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -76,7 +77,8 @@ public ProviderInfo get(GetProviderRequest request) { * response. There is no guarantee of a specific ordering of the elements in the array. 
*/ public Iterable list(ListProvidersRequest request) { - return impl.list(request).getProviders(); + return new Paginator<>( + request, impl::list, ListProvidersResponse::getProviders, response -> null); } public Iterable listShares(String name) { @@ -91,7 +93,8 @@ public Iterable listShares(String name) { *

* the caller is a metastore admin, or * the caller is the owner. */ public Iterable listShares(ListSharesRequest request) { - return impl.listShares(request).getShares(); + return new Paginator<>( + request, impl::listShares, ListProviderSharesResponse::getShares, response -> null); } public ProviderInfo update(String name) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientsAPI.java index f74ccd8c5..b40fac9e5 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/RecipientsAPI.java @@ -3,6 +3,7 @@ import com.databricks.sdk.core.ApiClient; import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -90,7 +91,8 @@ public RecipientInfo get(GetRecipientRequest request) { * specific ordering of the elements in the array. */ public Iterable list(ListRecipientsRequest request) { - return impl.list(request).getRecipients(); + return new Paginator<>( + request, impl::list, ListRecipientsResponse::getRecipients, response -> null); } public RecipientInfo rotateToken(String name, long existingTokenExpireInSeconds) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObject.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObject.java index 381139da8..db3ddc205 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObject.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObject.java @@ -26,9 +26,17 @@ public class SharedDataObject { @JsonProperty("comment") private String comment; + /** + * The content of the notebook file when the data object type is NOTEBOOK_FILE. 
This should be + * base64 encoded. Required for adding a NOTEBOOK_FILE, optional for updating, ignored for other + * types. + */ + @JsonProperty("content") + private String content; + /** The type of the data object. */ @JsonProperty("data_object_type") - private String dataObjectType; + private SharedDataObjectDataObjectType dataObjectType; /** * Whether to enable or disable sharing of data history. If not specified, the default is @@ -119,12 +127,21 @@ public String getComment() { return comment; } - public SharedDataObject setDataObjectType(String dataObjectType) { + public SharedDataObject setContent(String content) { + this.content = content; + return this; + } + + public String getContent() { + return content; + } + + public SharedDataObject setDataObjectType(SharedDataObjectDataObjectType dataObjectType) { this.dataObjectType = dataObjectType; return this; } - public String getDataObjectType() { + public SharedDataObjectDataObjectType getDataObjectType() { return dataObjectType; } @@ -201,6 +218,7 @@ public boolean equals(Object o) { && Objects.equals(addedBy, that.addedBy) && Objects.equals(cdfEnabled, that.cdfEnabled) && Objects.equals(comment, that.comment) + && Objects.equals(content, that.content) && Objects.equals(dataObjectType, that.dataObjectType) && Objects.equals(historyDataSharingStatus, that.historyDataSharingStatus) && Objects.equals(name, that.name) @@ -218,6 +236,7 @@ public int hashCode() { addedBy, cdfEnabled, comment, + content, dataObjectType, historyDataSharingStatus, name, @@ -235,6 +254,7 @@ public String toString() { .add("addedBy", addedBy) .add("cdfEnabled", cdfEnabled) .add("comment", comment) + .add("content", content) .add("dataObjectType", dataObjectType) .add("historyDataSharingStatus", historyDataSharingStatus) .add("name", name) diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObjectDataObjectType.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObjectDataObjectType.java new file mode 100755 index 000000000..69d3ba9b8 --- /dev/null +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharedDataObjectDataObjectType.java @@ -0,0 +1,17 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package com.databricks.sdk.service.sharing; + +import com.databricks.sdk.support.Generated; + +/** The type of the data object. */ +@Generated +public enum SharedDataObjectDataObjectType { + MATERIALIZED_VIEW, + MODEL, + NOTEBOOK_FILE, + SCHEMA, + STREAMING_TABLE, + TABLE, + VIEW, +} diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharesAPI.java index 758a460d5..f2e211e6c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharesAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/SharesAPI.java @@ -3,6 +3,7 @@ import com.databricks.sdk.core.ApiClient; import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -78,7 +79,8 @@ public ShareInfo get(GetShareRequest request) { * array. 
*/ public Iterable list() { - return impl.list().getShares(); + return new Paginator<>( + null, (Void v) -> impl.list(), ListSharesResponse::getShares, response -> null); } public com.databricks.sdk.service.catalog.PermissionsList sharePermissions(String name) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertQuery.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertQuery.java index 627aa8edc..34800625e 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertQuery.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/AlertQuery.java @@ -76,7 +76,7 @@ public class AlertQuery { @JsonProperty("updated_at") private String updatedAt; - /** The ID of the user who created this query. */ + /** The ID of the user who owns the query. */ @JsonProperty("user_id") private Long userId; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Dashboard.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Dashboard.java index 486d579ae..2b8ca46fd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Dashboard.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Dashboard.java @@ -89,7 +89,7 @@ public class Dashboard { @JsonProperty("user") private User user; - /** The ID of the user that created and owns this dashboard. */ + /** The ID of the user who owns the dashboard. 
*/ @JsonProperty("user_id") private Long userId; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Query.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Query.java index 5432d2c0a..f936b674c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Query.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Query.java @@ -128,7 +128,7 @@ public class Query { @JsonProperty("user") private User user; - /** The ID of the user who created this query. */ + /** The ID of the user who owns the query. */ @JsonProperty("user_id") private Long userId; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousesAPI.java index ee0f01a70..ec36bbcdf 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousesAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/WarehousesAPI.java @@ -3,6 +3,7 @@ import com.databricks.sdk.core.ApiClient; import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; import com.databricks.sdk.support.Wait; import java.time.Duration; import java.util.Arrays; @@ -218,7 +219,8 @@ public GetWorkspaceWarehouseConfigResponse getWorkspaceWarehouseConfig() { *

Lists all SQL warehouses that a user has manager permissions on. */ public Iterable list(ListWarehousesRequest request) { - return impl.list(request).getWarehouses(); + return new Paginator<>( + request, impl::list, ListWarehousesResponse::getWarehouses, response -> null); } public WarehousePermissions setPermissions(String warehouseId) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteDataVectorIndexRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteDataVectorIndexRequest.java index 281799d92..5405fc1cd 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteDataVectorIndexRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteDataVectorIndexRequest.java @@ -12,19 +12,19 @@ @Generated public class DeleteDataVectorIndexRequest { /** Name of the vector index where data is to be deleted. Must be a Direct Vector Access Index. */ - private String name; + private String indexName; /** List of primary keys for the data to be deleted. 
*/ @JsonProperty("primary_keys") private Collection primaryKeys; - public DeleteDataVectorIndexRequest setName(String name) { - this.name = name; + public DeleteDataVectorIndexRequest setIndexName(String indexName) { + this.indexName = indexName; return this; } - public String getName() { - return name; + public String getIndexName() { + return indexName; } public DeleteDataVectorIndexRequest setPrimaryKeys(Collection primaryKeys) { @@ -41,18 +41,19 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; DeleteDataVectorIndexRequest that = (DeleteDataVectorIndexRequest) o; - return Objects.equals(name, that.name) && Objects.equals(primaryKeys, that.primaryKeys); + return Objects.equals(indexName, that.indexName) + && Objects.equals(primaryKeys, that.primaryKeys); } @Override public int hashCode() { - return Objects.hash(name, primaryKeys); + return Objects.hash(indexName, primaryKeys); } @Override public String toString() { return new ToStringer(DeleteDataVectorIndexRequest.class) - .add("name", name) + .add("indexName", indexName) .add("primaryKeys", primaryKeys) .toString(); } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteEndpointRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteEndpointRequest.java index 13a48964d..216fb4e53 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteEndpointRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeleteEndpointRequest.java @@ -12,9 +12,6 @@ public class DeleteEndpointRequest { /** Name of the endpoint */ private String endpointName; - /** Name of the endpoint to delete */ - private String name; - public DeleteEndpointRequest setEndpointName(String endpointName) { this.endpointName = endpointName; return this; @@ -24,33 +21,21 @@ public String getEndpointName() { return endpointName; } - 
public DeleteEndpointRequest setName(String name) { - this.name = name; - return this; - } - - public String getName() { - return name; - } - @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; DeleteEndpointRequest that = (DeleteEndpointRequest) o; - return Objects.equals(endpointName, that.endpointName) && Objects.equals(name, that.name); + return Objects.equals(endpointName, that.endpointName); } @Override public int hashCode() { - return Objects.hash(endpointName, name); + return Objects.hash(endpointName); } @Override public String toString() { - return new ToStringer(DeleteEndpointRequest.class) - .add("endpointName", endpointName) - .add("name", name) - .toString(); + return new ToStringer(DeleteEndpointRequest.class).add("endpointName", endpointName).toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DirectAccessVectorIndexSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DirectAccessVectorIndexSpec.java index d2e5886ab..406639195 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DirectAccessVectorIndexSpec.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DirectAccessVectorIndexSpec.java @@ -10,6 +10,10 @@ @Generated public class DirectAccessVectorIndexSpec { + /** Contains the optional model endpoint to use during query time. 
*/ + @JsonProperty("embedding_source_columns") + private Collection embeddingSourceColumns; + /** */ @JsonProperty("embedding_vector_columns") private Collection embeddingVectorColumns; @@ -25,6 +29,16 @@ public class DirectAccessVectorIndexSpec { @JsonProperty("schema_json") private String schemaJson; + public DirectAccessVectorIndexSpec setEmbeddingSourceColumns( + Collection embeddingSourceColumns) { + this.embeddingSourceColumns = embeddingSourceColumns; + return this; + } + + public Collection getEmbeddingSourceColumns() { + return embeddingSourceColumns; + } + public DirectAccessVectorIndexSpec setEmbeddingVectorColumns( Collection embeddingVectorColumns) { this.embeddingVectorColumns = embeddingVectorColumns; @@ -49,18 +63,20 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; DirectAccessVectorIndexSpec that = (DirectAccessVectorIndexSpec) o; - return Objects.equals(embeddingVectorColumns, that.embeddingVectorColumns) + return Objects.equals(embeddingSourceColumns, that.embeddingSourceColumns) + && Objects.equals(embeddingVectorColumns, that.embeddingVectorColumns) && Objects.equals(schemaJson, that.schemaJson); } @Override public int hashCode() { - return Objects.hash(embeddingVectorColumns, schemaJson); + return Objects.hash(embeddingSourceColumns, embeddingVectorColumns, schemaJson); } @Override public String toString() { return new ToStringer(DirectAccessVectorIndexSpec.class) + .add("embeddingSourceColumns", embeddingSourceColumns) .add("embeddingVectorColumns", embeddingVectorColumns) .add("schemaJson", schemaJson) .toString(); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/QueryVectorIndexRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/QueryVectorIndexRequest.java index 3e7b389e8..245748834 100755 --- 
a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/QueryVectorIndexRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/QueryVectorIndexRequest.java @@ -42,6 +42,10 @@ public class QueryVectorIndexRequest { @JsonProperty("query_vector") private Collection queryVector; + /** Threshold for the approximate nearest neighbor search. Defaults to 0.0. */ + @JsonProperty("score_threshold") + private Double scoreThreshold; + public QueryVectorIndexRequest setColumns(Collection columns) { this.columns = columns; return this; @@ -96,6 +100,15 @@ public Collection getQueryVector() { return queryVector; } + public QueryVectorIndexRequest setScoreThreshold(Double scoreThreshold) { + this.scoreThreshold = scoreThreshold; + return this; + } + + public Double getScoreThreshold() { + return scoreThreshold; + } + @Override public boolean equals(Object o) { if (this == o) return true; @@ -106,12 +119,14 @@ public boolean equals(Object o) { && Objects.equals(indexName, that.indexName) && Objects.equals(numResults, that.numResults) && Objects.equals(queryText, that.queryText) - && Objects.equals(queryVector, that.queryVector); + && Objects.equals(queryVector, that.queryVector) + && Objects.equals(scoreThreshold, that.scoreThreshold); } @Override public int hashCode() { - return Objects.hash(columns, filtersJson, indexName, numResults, queryText, queryVector); + return Objects.hash( + columns, filtersJson, indexName, numResults, queryText, queryVector, scoreThreshold); } @Override @@ -123,6 +138,7 @@ public String toString() { .add("numResults", numResults) .add("queryText", queryText) .add("queryVector", queryVector) + .add("scoreThreshold", scoreThreshold) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpsertDataVectorIndexRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpsertDataVectorIndexRequest.java index 
bf114b3c0..90c4ff4c8 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpsertDataVectorIndexRequest.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/UpsertDataVectorIndexRequest.java @@ -10,31 +10,31 @@ /** Request payload for upserting data into a vector index. */ @Generated public class UpsertDataVectorIndexRequest { - /** JSON string representing the data to be upserted. */ - @JsonProperty("inputs_json") - private String inputsJson; - /** * Name of the vector index where data is to be upserted. Must be a Direct Vector Access Index. */ - private String name; + private String indexName; - public UpsertDataVectorIndexRequest setInputsJson(String inputsJson) { - this.inputsJson = inputsJson; + /** JSON string representing the data to be upserted. */ + @JsonProperty("inputs_json") + private String inputsJson; + + public UpsertDataVectorIndexRequest setIndexName(String indexName) { + this.indexName = indexName; return this; } - public String getInputsJson() { - return inputsJson; + public String getIndexName() { + return indexName; } - public UpsertDataVectorIndexRequest setName(String name) { - this.name = name; + public UpsertDataVectorIndexRequest setInputsJson(String inputsJson) { + this.inputsJson = inputsJson; return this; } - public String getName() { - return name; + public String getInputsJson() { + return inputsJson; } @Override @@ -42,19 +42,19 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; UpsertDataVectorIndexRequest that = (UpsertDataVectorIndexRequest) o; - return Objects.equals(inputsJson, that.inputsJson) && Objects.equals(name, that.name); + return Objects.equals(indexName, that.indexName) && Objects.equals(inputsJson, that.inputsJson); } @Override public int hashCode() { - return Objects.hash(inputsJson, name); + return Objects.hash(indexName, inputsJson); } @Override public String toString() { return 
new ToStringer(UpsertDataVectorIndexRequest.class) + .add("indexName", indexName) .add("inputsJson", inputsJson) - .add("name", name) .toString(); } } diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchEndpointsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchEndpointsAPI.java index afbb2b787..ae5a5f350 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchEndpointsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchEndpointsAPI.java @@ -94,8 +94,8 @@ public Wait createEndpoint(CreateEndpoint request) { response); } - public void deleteEndpoint(String endpointName, String name) { - deleteEndpoint(new DeleteEndpointRequest().setEndpointName(endpointName).setName(name)); + public void deleteEndpoint(String endpointName) { + deleteEndpoint(new DeleteEndpointRequest().setEndpointName(endpointName)); } /** Delete an endpoint. 
*/ diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchIndexesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchIndexesAPI.java index 55d53a153..e9d59992f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchIndexesAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchIndexesAPI.java @@ -35,10 +35,11 @@ public VectorSearchIndexesAPI(VectorSearchIndexesService mock) { } public CreateVectorIndexResponse createIndex( - String name, String primaryKey, VectorIndexType indexType) { + String name, String endpointName, String primaryKey, VectorIndexType indexType) { return createIndex( new CreateVectorIndexRequest() .setName(name) + .setEndpointName(endpointName) .setPrimaryKey(primaryKey) .setIndexType(indexType)); } @@ -53,9 +54,9 @@ public CreateVectorIndexResponse createIndex(CreateVectorIndexRequest request) { } public DeleteDataVectorIndexResponse deleteDataVectorIndex( - String name, Collection primaryKeys) { + String indexName, Collection primaryKeys) { return deleteDataVectorIndex( - new DeleteDataVectorIndexRequest().setName(name).setPrimaryKeys(primaryKeys)); + new DeleteDataVectorIndexRequest().setIndexName(indexName).setPrimaryKeys(primaryKeys)); } /** @@ -142,9 +143,9 @@ public void syncIndex(SyncIndexRequest request) { impl.syncIndex(request); } - public UpsertDataVectorIndexResponse upsertDataVectorIndex(String name, String inputsJson) { + public UpsertDataVectorIndexResponse upsertDataVectorIndex(String indexName, String inputsJson) { return upsertDataVectorIndex( - new UpsertDataVectorIndexRequest().setName(name).setInputsJson(inputsJson)); + new UpsertDataVectorIndexRequest().setIndexName(indexName).setInputsJson(inputsJson)); } /** diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchIndexesImpl.java 
b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchIndexesImpl.java index 2156460fc..7d70a3055 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchIndexesImpl.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/VectorSearchIndexesImpl.java @@ -26,7 +26,8 @@ public CreateVectorIndexResponse createIndex(CreateVectorIndexRequest request) { @Override public DeleteDataVectorIndexResponse deleteDataVectorIndex(DeleteDataVectorIndexRequest request) { - String path = String.format("/api/2.0/vector-search/indexes/%s/delete-data", request.getName()); + String path = + String.format("/api/2.0/vector-search/indexes/%s/delete-data", request.getIndexName()); Map headers = new HashMap<>(); headers.put("Accept", "application/json"); headers.put("Content-Type", "application/json"); @@ -74,7 +75,8 @@ public void syncIndex(SyncIndexRequest request) { @Override public UpsertDataVectorIndexResponse upsertDataVectorIndex(UpsertDataVectorIndexRequest request) { - String path = String.format("/api/2.0/vector-search/indexes/%s/upsert-data", request.getName()); + String path = + String.format("/api/2.0/vector-search/indexes/%s/upsert-data", request.getIndexName()); Map headers = new HashMap<>(); headers.put("Accept", "application/json"); headers.put("Content-Type", "application/json"); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCredentials.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCredentials.java index 88d28f6f4..b30d35e8f 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCredentials.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCredentials.java @@ -17,11 +17,24 @@ public class CreateCredentials { @JsonProperty("git_provider") private String gitProvider; - /** Git username. 
*/ + /** + * The username or email provided with your Git provider account, depending on which provider you + * are using. For GitHub, GitHub Enterprise Server, or Azure DevOps Services, either email or + * username may be used. For GitLab, GitLab Enterprise Edition, email must be used. For AWS + * CodeCommit, BitBucket or BitBucket Server, username must be used. For all other providers + * please see your provider's Personal Access Token authentication documentation to see what is + * supported. + */ @JsonProperty("git_username") private String gitUsername; - /** The personal access token used to authenticate to the corresponding Git provider. */ + /** + * The personal access token used to authenticate to the corresponding Git provider. For certain + * providers, support may exist for other types of scoped access tokens. [Learn more]. The + * personal access token used to authenticate to the corresponding Git + * + *

[Learn more]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html + */ @JsonProperty("personal_access_token") private String personalAccessToken; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCredentialsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCredentialsResponse.java index f6b3db692..5f693ebf0 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCredentialsResponse.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CreateCredentialsResponse.java @@ -21,7 +21,14 @@ public class CreateCredentialsResponse { @JsonProperty("git_provider") private String gitProvider; - /** Git username. */ + /** + * The username or email provided with your Git provider account, depending on which provider you + * are using. For GitHub, GitHub Enterprise Server, or Azure DevOps Services, either email or + * username may be used. For GitLab, GitLab Enterprise Edition, email must be used. For AWS + * CodeCommit, BitBucket or BitBucket Server, username must be used. For all other providers + * please see your provider's Personal Access Token authentication documentation to see what is + * supported. + */ @JsonProperty("git_username") private String gitUsername; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CredentialInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CredentialInfo.java index 2673d2f45..5df03a4b4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CredentialInfo.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/CredentialInfo.java @@ -21,7 +21,14 @@ public class CredentialInfo { @JsonProperty("git_provider") private String gitProvider; - /** Git username. 
*/ + /** + * The username or email provided with your Git provider account, depending on which provider you + * are using. For GitHub, GitHub Enterprise Server, or Azure DevOps Services, either email or + * username may be used. For GitLab, GitLab Enterprise Edition, email must be used. For AWS + * CodeCommit, BitBucket or BitBucket Server, username must be used. For all other providers + * please see your provider's Personal Access Token authentication documentation to see what is + * supported. + */ @JsonProperty("git_username") private String gitUsername; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GitCredentialsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GitCredentialsAPI.java index 91645ce93..7f5405076 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GitCredentialsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/GitCredentialsAPI.java @@ -3,6 +3,7 @@ import com.databricks.sdk.core.ApiClient; import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -76,7 +77,8 @@ public CredentialInfo get(GetGitCredentialRequest request) { *

Lists the calling user's Git credentials. One credential per user is supported. */ public Iterable list() { - return impl.list().getCredentials(); + return new Paginator<>( + null, (Void v) -> impl.list(), GetCredentialsResponse::getCredentials, response -> null); } public void update(long credentialId) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsAPI.java index 5786ad7e2..3e99279b4 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsAPI.java @@ -3,6 +3,7 @@ import com.databricks.sdk.core.ApiClient; import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -41,7 +42,7 @@ public void createScope(String scope) { * Create a new secret scope. * *

The scope name must consist of alphanumeric characters, dashes, underscores, and periods, - * and may not exceed 128 characters. The maximum number of scopes in a workspace is 100. + * and may not exceed 128 characters. */ public void createScope(CreateScope request) { impl.createScope(request); @@ -149,7 +150,7 @@ public Iterable listAcls(String scope) { * if the user does not have permission to make this API call. */ public Iterable listAcls(ListAclsRequest request) { - return impl.listAcls(request).getItems(); + return new Paginator<>(request, impl::listAcls, ListAclsResponse::getItems, response -> null); } /** @@ -160,7 +161,8 @@ public Iterable listAcls(ListAclsRequest request) { *

Throws `PERMISSION_DENIED` if the user does not have permission to make this API call. */ public Iterable listScopes() { - return impl.listScopes().getScopes(); + return new Paginator<>( + null, (Void v) -> impl.listScopes(), ListScopesResponse::getScopes, response -> null); } public Iterable listSecrets(String scope) { @@ -179,7 +181,8 @@ public Iterable listSecrets(String scope) { * user does not have permission to make this API call. */ public Iterable listSecrets(ListSecretsRequest request) { - return impl.listSecrets(request).getSecrets(); + return new Paginator<>( + request, impl::listSecrets, ListSecretsResponse::getSecrets, response -> null); } public void putAcl(String scope, String principal, AclPermission permission) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsService.java index 397dc40dc..697993a9c 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsService.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/SecretsService.java @@ -24,7 +24,7 @@ public interface SecretsService { * Create a new secret scope. * *

The scope name must consist of alphanumeric characters, dashes, underscores, and periods, - * and may not exceed 128 characters. The maximum number of scopes in a workspace is 100. + * and may not exceed 128 characters. */ void createScope(CreateScope createScope); diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateCredentials.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateCredentials.java index 2adb46a9b..74ac9404a 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateCredentials.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/UpdateCredentials.java @@ -20,11 +20,24 @@ public class UpdateCredentials { @JsonProperty("git_provider") private String gitProvider; - /** Git username. */ + /** + * The username or email provided with your Git provider account, depending on which provider you + * are using. For GitHub, GitHub Enterprise Server, or Azure DevOps Services, either email or + * username may be used. For GitLab, GitLab Enterprise Edition, email must be used. For AWS + * CodeCommit, BitBucket or BitBucket Server, username must be used. For all other providers + * please see your provider's Personal Access Token authentication documentation to see what is + * supported. + */ @JsonProperty("git_username") private String gitUsername; - /** The personal access token used to authenticate to the corresponding Git provider. */ + /** + * The personal access token used to authenticate to the corresponding Git provider. For certain + * providers, support may exist for other types of scoped access tokens. [Learn more]. The + * personal access token used to authenticate to the corresponding Git + * + *

[Learn more]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html + */ @JsonProperty("personal_access_token") private String personalAccessToken; diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceAPI.java index 8214fca08..486826182 100755 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceAPI.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/workspace/WorkspaceAPI.java @@ -3,6 +3,7 @@ import com.databricks.sdk.core.ApiClient; import com.databricks.sdk.support.Generated; +import com.databricks.sdk.support.Paginator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -142,7 +143,7 @@ public Iterable list(String path) { * does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`. */ public Iterable list(ListWorkspaceRequest request) { - return impl.list(request).getObjects(); + return new Paginator<>(request, impl::list, ListResponse::getObjects, response -> null); } public void mkdirs(String path) { diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/support/Paginator.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/support/Paginator.java index fcb8e3510..47694813c 100644 --- a/databricks-sdk-java/src/main/java/com/databricks/sdk/support/Paginator.java +++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/support/Paginator.java @@ -48,11 +48,11 @@ public Paginator( this.itemsFn = itemsFn; this.nextPageFn = nextPageFn; all = outerIterator(); - flipNextPage(request); + flipNextPage(request, true); } - private boolean flipNextPage(RQ request) { - if (request == null) { + private boolean flipNextPage(RQ request, boolean firstRequest) { + if (!firstRequest && request == null) { return false; } response = requestFn.apply(request); @@ -77,7 +77,7 @@ public boolean hasNext() { if (currentPage.hasNext()) { return 
true; } - return flipNextPage(nextPageFn.apply(response)); + return flipNextPage(nextPageFn.apply(response), false); } @Override diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/VariableArgumentsProvider.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/VariableArgumentsProvider.java new file mode 100644 index 000000000..e39e1a2de --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/VariableArgumentsProvider.java @@ -0,0 +1,48 @@ +package com.databricks.sdk; + +import java.lang.reflect.Field; +import java.util.stream.Stream; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.ArgumentsProvider; +import org.junit.jupiter.params.support.AnnotationConsumer; + +public class VariableArgumentsProvider + implements ArgumentsProvider, AnnotationConsumer { + private String variableName; + + @Override + public Stream provideArguments(ExtensionContext context) { + return context + .getTestClass() + .map(this::getField) + .map(this::getValue) + .orElseThrow(() -> new IllegalArgumentException("Failed to load test arguments")); + } + + @Override + public void accept(VariableSource variableSource) { + variableName = variableSource.value(); + } + + private Field getField(Class clazz) { + try { + return clazz.getDeclaredField(variableName); + } catch (Exception e) { + return null; + } + } + + @SuppressWarnings("unchecked") + private Stream getValue(Field field) { + Object value = null; + try { + field.setAccessible(true); + value = field.get(null); + field.setAccessible(false); + } catch (Exception ignored) { + } + + return value == null ? 
null : (Stream) value; + } +} diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/VariableSource.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/VariableSource.java new file mode 100644 index 000000000..205e90abe --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/VariableSource.java @@ -0,0 +1,13 @@ +package com.databricks.sdk; + +import java.lang.annotation.*; +import org.junit.jupiter.params.provider.ArgumentsSource; + +@Documented +@Target(ElementType.METHOD) +@Retention(RetentionPolicy.RUNTIME) +@ArgumentsSource(VariableArgumentsProvider.class) +public @interface VariableSource { + /** The name of the static field containing the test arguments. */ + String value(); +} diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/ApiClientTest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/ApiClientTest.java index 8bea3bd22..94ac8489a 100644 --- a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/ApiClientTest.java +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/ApiClientTest.java @@ -1,7 +1,6 @@ package com.databricks.sdk.core; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.*; import com.databricks.sdk.core.error.ApiErrorBody; import com.databricks.sdk.core.error.ErrorDetail; @@ -11,7 +10,8 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; -import java.net.SocketTimeoutException; +import java.net.UnknownHostException; +import java.time.*; import java.util.*; import org.apache.http.impl.EnglishReasonPhraseCatalog; import org.junit.jupiter.api.Test; @@ -62,7 +62,14 @@ private void runApiClientTest( Class clazz, T expectedResponse) { ApiClient client = getApiClient(request, responses); - T response = 
client.GET(request.getUri().getPath(), clazz, Collections.emptyMap()); + T response; + if (request.getMethod().equals(Request.GET)) { + response = client.GET(request.getUri().getPath(), clazz, Collections.emptyMap()); + } else if (request.getMethod().equals(Request.POST)) { + response = client.POST(request.getUri().getPath(), request, clazz, Collections.emptyMap()); + } else { + throw new IllegalArgumentException("Unsupported method: " + request.getMethod()); + } assertEquals(response, expectedResponse); } @@ -76,15 +83,31 @@ private void runFailingApiClientTest( private T runFailingApiClientTest( Request request, List responses, Class clazz, Class exceptionClass) { ApiClient client = getApiClient(request, responses); - return assertThrows( - exceptionClass, - () -> client.GET(request.getUri().getPath(), clazz, Collections.emptyMap())); + if (request.getMethod().equals(Request.GET)) { + return assertThrows( + exceptionClass, + () -> client.GET(request.getUri().getPath(), clazz, Collections.emptyMap())); + } else if (request.getMethod().equals(Request.POST)) { + return assertThrows( + exceptionClass, + () -> client.POST(request.getUri().getPath(), request, clazz, Collections.emptyMap())); + } else { + throw new IllegalArgumentException("Unsupported method: " + request.getMethod()); + } } private Request getBasicRequest() { return new Request("GET", "http://my.host/api/my/endpoint"); } + private Request getExampleNonIdempotentRequest() { + return new Request("POST", "http://my.host/api/2.0/sql/statements/"); + } + + private Request getExampleIdempotentRequest() { + return new Request("GET", "http://my.host/api/2.0/sql/sessions/"); + } + private SuccessfulResponse getSuccessResponse(Request req) { return new SuccessfulResponse( new Response(req, 200, "OK", Collections.emptyMap(), "{\"key\":\"value\"}")); @@ -100,6 +123,30 @@ private SuccessfulResponse getTooManyRequestsResponse(Request req) { new Response(req, 429, "Too Many Requests", Collections.emptyMap(), (String) 
null)); } + private SuccessfulResponse getTooManyRequestsResponseWithRetryAfterHeader(Request req) { + return new SuccessfulResponse( + new Response( + req, + 429, + "Too Many Requests", + Collections.singletonMap("retry-after", Collections.singletonList("1")), + (String) null)); + } + + private SuccessfulResponse getTooManyRequestsResponseWithRetryAfterDateHeader(Request req) { + ZoneOffset gmtOffset = ZoneId.of("GMT").getRules().getOffset(Instant.now()); + ZonedDateTime now = ZonedDateTime.now(gmtOffset); + String retryAfterTime = + now.plusSeconds(5).format(java.time.format.DateTimeFormatter.RFC_1123_DATE_TIME); + return new SuccessfulResponse( + new Response( + req, + 429, + "Too Many Requests", + Collections.singletonMap("retry-after", Collections.singletonList(retryAfterTime)), + (String) null)); + } + private SuccessfulResponse getTransientError(Request req, int statusCode, ApiErrorBody body) throws JsonProcessingException { return getTransientError(req, statusCode, mapper.writeValueAsString(body)); @@ -141,7 +188,7 @@ void retry429() { runApiClientTest( req, Arrays.asList( - getTooManyRequestsResponse(req), + getTooManyRequestsResponseWithRetryAfterHeader(req), getTooManyRequestsResponse(req), getSuccessResponse(req)), MyEndpointResponse.class, @@ -154,12 +201,52 @@ void failAfterTooManyRetries() { runFailingApiClientTest( req, Arrays.asList( + getTooManyRequestsResponseWithRetryAfterDateHeader(req), getTooManyRequestsResponse(req), getTooManyRequestsResponse(req), getTooManyRequestsResponse(req), getSuccessResponse(req)), MyEndpointResponse.class, - "Request GET /api/my/endpoint failed after 3 retries"); + "Request GET /api/my/endpoint failed after 4 retries"); + } + + @Test + void checkExponentialBackoffForRetry() { + Request req = getBasicRequest(); + ApiClient client = + getApiClient(req, Collections.singletonList(getTooManyRequestsResponse(req))); + for (int attemptNumber = 1; attemptNumber < 5; attemptNumber++) { + long backoff = 
client.getBackoffMillis(null, attemptNumber); + int expectedBackoff = Math.min(60000, 1000 * (1 << (attemptNumber - 1))); + assertTrue(backoff >= expectedBackoff); + assertTrue(backoff <= expectedBackoff + 750L); + } + } + + @Test + void failIdempotentRequestAfterTooManyRetries() throws JsonProcessingException { + Request req = getExampleIdempotentRequest(); + + runFailingApiClientTest( + req, + Arrays.asList( + getTooManyRequestsResponse(req), + getTransientError( + req, + 400, + new ApiErrorBody( + "ERROR", + null, + null, + null, + null, + "Workspace 123 does not have any associated worker environments", + null)), + getTooManyRequestsResponse(req), + getTooManyRequestsResponse(req), + getSuccessResponse(req)), + MyEndpointResponse.class, + "Request GET /api/2.0/sql/sessions/ failed after 4 retries"); } @Test @@ -188,7 +275,7 @@ void retryDatabricksApi12RetriableError() throws JsonProcessingException { @Test void errorDetails() throws JsonProcessingException { - Request req = getBasicRequest(); + Request req = getExampleNonIdempotentRequest(); Map metadata = new HashMap<>(); metadata.put("etag", "value"); @@ -249,14 +336,24 @@ void retryDatabricksRetriableError() throws JsonProcessingException { } @Test - void retrySocketTimeoutException() { + void retryUnknownHostException() { Request req = getBasicRequest(); runApiClientTest( req, Arrays.asList( - new Failure(new SocketTimeoutException("Connect timed out")), getSuccessResponse(req)), + new Failure(new UnknownHostException("Connect timed out")), getSuccessResponse(req)), MyEndpointResponse.class, new MyEndpointResponse().setKey("value")); } + + @Test + void testGetBackoffFromRetryAfterHeader() { + Request req = getBasicRequest(); + Response response = getTooManyRequestsResponseWithRetryAfterHeader(req).getResponse(); + assertEquals(Optional.of(1000L), ApiClient.getBackoffFromRetryAfterHeader(response)); + + response = getTooManyRequestsResponse(req).getResponse(); + assertEquals(Optional.empty(), 
ApiClient.getBackoffFromRetryAfterHeader(response)); + } } diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/DummyHttpClient.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/DummyHttpClient.java index df757c58e..0b7854059 100644 --- a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/DummyHttpClient.java +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/DummyHttpClient.java @@ -31,10 +31,19 @@ public DummyHttpClient with(Request in, IOException exception) { @Override public Response execute(Request in) throws IOException { - List responses = stub.get(in); + List responses = getResponseList(in); if (responses == null || responses.isEmpty()) { throw new IllegalArgumentException("No mock for " + in); } return responses.remove(0).getResponse(); } + + private List getResponseList(Request in) { + for (Request r : stub.keySet()) { + if (r.getMethod().equals(in.getMethod()) && r.getUrl().equals(in.getUrl())) { + return stub.get(r); + } + } + return null; + } } diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/error/ErrorMapperTest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/error/ErrorMapperTest.java new file mode 100644 index 000000000..75d00c16d --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/error/ErrorMapperTest.java @@ -0,0 +1,137 @@ +package com.databricks.sdk.core.error; + +import com.databricks.sdk.VariableSource; +import com.databricks.sdk.core.DatabricksError; +import com.databricks.sdk.core.error.platform.*; +import com.databricks.sdk.core.http.Request; +import com.databricks.sdk.core.http.Response; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import java.util.stream.Stream; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; + +public class ErrorMapperTest { + static Stream arguments = + Stream.of( + 
Arguments.of( + InvalidParameterValue.class, + 400, + "{\"error_code\":\"INVALID_PARAMETER_VALUE\",\"message\":\"Invalid parameter value\"}"), + Arguments.of( + BadRequest.class, + 400, + "{\"error_code\":\"BAD_REQUEST\",\"message\":\"the request is invalid\"}"), + Arguments.of( + Unauthenticated.class, + 401, + "{\"error_code\":\"UNAUTHENTICATED\",\"message\":\"the request does not have valid authentication (AuthN) credentials for the operation\"}"), + Arguments.of( + PermissionDenied.class, + 403, + "{\"error_code\":\"PERMISSION_DENIED\",\"message\":\"the caller does not have permission to execute the specified operation\"}"), + Arguments.of( + NotFound.class, + 404, + "{\"error_code\":\"NOT_FOUND\",\"message\":\"the operation was performed on a resource that does not exist\"}"), + Arguments.of( + ResourceConflict.class, + 409, + "{\"error_code\":\"RESOURCE_CONFLICT\",\"message\":\"maps to all HTTP 409 (Conflict) responses\"}"), + Arguments.of( + TooManyRequests.class, + 429, + "{\"error_code\":\"TOO_MANY_REQUESTS\",\"message\":\"maps to HTTP code: 429 Too Many Requests\"}"), + Arguments.of( + Cancelled.class, + 499, + "{\"error_code\":\"CANCELLED\",\"message\":\"the operation was explicitly canceled by the caller\"}"), + Arguments.of( + com.databricks.sdk.core.error.platform.InternalError.class, + 500, + "{\"error_code\":\"INTERNAL_ERROR\",\"message\":\"some invariants expected by the underlying system have been broken\"}"), + Arguments.of( + NotImplemented.class, + 501, + "{\"error_code\":\"NOT_IMPLEMENTED\",\"message\":\"the operation is not implemented or is not supported/enabled in this service\"}"), + Arguments.of( + TemporarilyUnavailable.class, + 503, + "{\"error_code\":\"TEMPORARILY_UNAVAILABLE\",\"message\":\"the service is currently unavailable\"}"), + Arguments.of( + DeadlineExceeded.class, + 504, + "{\"error_code\":\"DEADLINE_EXCEEDED\",\"message\":\"the deadline expired before the operation could complete\"}"), + Arguments.of( + 
ResourceDoesNotExist.class, + 404, + "{\"error_code\":\"RESOURCE_DOES_NOT_EXIST\",\"message\":\"operation was performed on a resource that does not exist\"}"), + Arguments.of( + Aborted.class, + 409, + "{\"error_code\":\"ABORTED\",\"message\":\"the operation was aborted, typically due to a concurrency issue such as a sequencer check failure\"}"), + Arguments.of( + AlreadyExists.class, + 409, + "{\"error_code\":\"ALREADY_EXISTS\",\"message\":\"operation was rejected due a conflict with an existing resource\"}"), + Arguments.of( + ResourceAlreadyExists.class, + 409, + "{\"error_code\":\"RESOURCE_ALREADY_EXISTS\",\"message\":\"operation was rejected due a conflict with an existing resource\"}"), + Arguments.of( + ResourceExhausted.class, + 429, + "{\"error_code\":\"RESOURCE_EXHAUSTED\",\"message\":\"operation is rejected due to per-user rate limiting\"}"), + Arguments.of( + RequestLimitExceeded.class, + 429, + "{\"error_code\":\"REQUEST_LIMIT_EXCEEDED\",\"message\":\"cluster request was rejected because it would exceed a resource limit\"}"), + Arguments.of( + Unknown.class, + 500, + "{\"error_code\":\"UNKNOWN\",\"message\":\"this error is used as a fallback if the platform-side mapping is missing some reason\"}"), + Arguments.of( + DataLoss.class, + 500, + "{\"error_code\":\"DATA_LOSS\",\"message\":\"unrecoverable data loss or corruption\"}")); + + @ParameterizedTest + @VariableSource("arguments") + void applyMapsErrorsCorrectly(Class expectedClass, int statusCode, String errorBody) + throws JsonProcessingException { + ErrorMapper mapper = new ErrorMapper(); + ApiErrorBody apiErrorBody = new ObjectMapper().readValue(errorBody, ApiErrorBody.class); + Request req = new Request("GET", "/a/b/c"); + Response resp = new Response(req, statusCode, null, null); + DatabricksError error = mapper.apply(resp, apiErrorBody); + assert error.getClass().equals(expectedClass); + } + + static final Stream overrideCases = + Stream.of( + Arguments.of( + ResourceDoesNotExist.class, + 
"GET", + "https://my.databricks.workspace/api/2.0/clusters/get?cluster_id=123", + 400, + "{\"error_code\":\"INVALID_PARAMETER_VALUE\",\"message\":\"Cluster 123 does not exist\"}"), + Arguments.of( + ResourceDoesNotExist.class, + "GET", + "https://my.databricks.workspace/api/2.1/jobs/get?job_id=123", + 400, + "{\"error_code\":\"INVALID_PARAMETER_VALUE\",\"message\":\"Job 123 does not exist\"}")); + + @ParameterizedTest + @VariableSource("overrideCases") + void applyOverridesErrorsCorrectly( + Class expected, String method, String url, int statusCode, String errorBody) + throws JsonProcessingException { + ErrorMapper mapper = new ErrorMapper(); + ApiErrorBody apiErrorBody = new ObjectMapper().readValue(errorBody, ApiErrorBody.class); + Request req = new Request(method, url); + Response resp = new Response(req, statusCode, null, null); + DatabricksError error = mapper.apply(resp, apiErrorBody); + assert error.getClass().equals(expected); + } +} diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/http/EncodingTest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/http/EncodingTest.java new file mode 100644 index 000000000..05daf5184 --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/http/EncodingTest.java @@ -0,0 +1,13 @@ +package com.databricks.sdk.core.http; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import org.junit.jupiter.api.Test; + +public class EncodingTest { + @Test + public void encodeMultiSegmentPathParameter() { + assertEquals("/foo/bar", Encoding.encodeMultiSegmentPathParameter("/foo/bar")); + assertEquals("a%3Fb%23c", Encoding.encodeMultiSegmentPathParameter("a?b#c")); + } +} diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/retry/IdempotentRequestRetryStrategyTest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/retry/IdempotentRequestRetryStrategyTest.java new file mode 100644 index 000000000..8a46d7831 --- /dev/null +++ 
b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/retry/IdempotentRequestRetryStrategyTest.java @@ -0,0 +1,26 @@ +package com.databricks.sdk.core.retry; + +import com.databricks.sdk.core.DatabricksError; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +public class IdempotentRequestRetryStrategyTest { + private static final RetryStrategy RETRY_STRATEGY = new IdempotentRequestRetryStrategy(); + + @Test + public void testIsRetriable() { + DatabricksError databricksError = + new DatabricksError("IO_ERROR", 523, new IllegalArgumentException()); + Assertions.assertFalse(RETRY_STRATEGY.isRetriable(databricksError)); + + databricksError = + new DatabricksError("TOO_MANY_REQUESTS", "Current request has to be retried", 429); + Assertions.assertTrue(RETRY_STRATEGY.isRetriable(databricksError)); + + databricksError = new DatabricksError("BAD_REQUEST", "Bad request", 400); + Assertions.assertFalse(RETRY_STRATEGY.isRetriable(databricksError)); + + databricksError = new DatabricksError("NOT_FOUND", "Not found", 404); + Assertions.assertFalse(RETRY_STRATEGY.isRetriable(databricksError)); + } +} diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/retry/NonIdempotentRequestRetryStrategyTest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/retry/NonIdempotentRequestRetryStrategyTest.java new file mode 100644 index 000000000..9442ef76a --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/retry/NonIdempotentRequestRetryStrategyTest.java @@ -0,0 +1,37 @@ +package com.databricks.sdk.core.retry; + +import com.databricks.sdk.core.DatabricksError; +import java.net.ConnectException; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +public class NonIdempotentRequestRetryStrategyTest { + private static final RetryStrategy RETRY_STRATEGY = new NonIdempotentRequestRetryStrategy(); + + @Test + public void testIsRetriable() { + DatabricksError databricksError = + 
new DatabricksError("IO_ERROR", 523, new IllegalArgumentException()); + Assertions.assertFalse(RETRY_STRATEGY.isRetriable(databricksError)); + + databricksError = new DatabricksError("IO_ERROR", 523, new ConnectException()); + Assertions.assertTrue(RETRY_STRATEGY.isRetriable(databricksError)); + + databricksError = + new DatabricksError("TOO_MANY_REQUESTS", "Current request has to be retried", 429); + Assertions.assertTrue(RETRY_STRATEGY.isRetriable(databricksError)); + + databricksError = new DatabricksError("BAD_REQUEST", "Bad request", 400); + Assertions.assertFalse(RETRY_STRATEGY.isRetriable(databricksError)); + + databricksError = new DatabricksError("NOT_FOUND", "Not found", 404); + Assertions.assertFalse(RETRY_STRATEGY.isRetriable(databricksError)); + + databricksError = new DatabricksError("SERVICE_UNAVAILABLE", "Service Unavailable", 503); + Assertions.assertTrue(RETRY_STRATEGY.isRetriable(databricksError)); + + databricksError = + new DatabricksError("TRANSIENT_ERROR", "There is no worker environment with id"); + Assertions.assertTrue(RETRY_STRATEGY.isRetriable(databricksError)); + } +} diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/retry/RequestBasedRetryStrategyPickerTest.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/retry/RequestBasedRetryStrategyPickerTest.java new file mode 100644 index 000000000..946c3840c --- /dev/null +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/retry/RequestBasedRetryStrategyPickerTest.java @@ -0,0 +1,40 @@ +package com.databricks.sdk.core.retry; + +import com.databricks.sdk.core.DatabricksConfig; +import com.databricks.sdk.core.http.Request; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +public class RequestBasedRetryStrategyPickerTest { + private static final String TEST_URL = "https://test.com"; + private static final DatabricksConfig CONFIG = new DatabricksConfig().setHost(TEST_URL); + private static final RetryStrategyPicker 
RETRY_STRATEGY_PICKER = + new RequestBasedRetryStrategyPicker(CONFIG); + + @Test + public void testGetRetryStrategy() { + Request request = new Request("GET", TEST_URL + "/api/2.0/sql/statements/1"); + RetryStrategy retryStrategy = RETRY_STRATEGY_PICKER.getRetryStrategy(request); + Assertions.assertInstanceOf(IdempotentRequestRetryStrategy.class, retryStrategy); + + request = new Request("POST", TEST_URL + "/api/2.0/sql/sessions/"); + retryStrategy = RETRY_STRATEGY_PICKER.getRetryStrategy(request); + Assertions.assertInstanceOf(IdempotentRequestRetryStrategy.class, retryStrategy); + + request = new Request("DELETE", TEST_URL + "/api/2.0/sql/statements/sessions/1"); + retryStrategy = RETRY_STRATEGY_PICKER.getRetryStrategy(request); + Assertions.assertInstanceOf(IdempotentRequestRetryStrategy.class, retryStrategy); + + request = new Request("GET", TEST_URL + "/api/2.0/sql/statements/1/result/chunks/1"); + retryStrategy = RETRY_STRATEGY_PICKER.getRetryStrategy(request); + Assertions.assertInstanceOf(IdempotentRequestRetryStrategy.class, retryStrategy); + + request = new Request("DELETE", TEST_URL + "/api/2.0/sql/sessions/1"); + retryStrategy = RETRY_STRATEGY_PICKER.getRetryStrategy(request); + Assertions.assertInstanceOf(IdempotentRequestRetryStrategy.class, retryStrategy); + + request = new Request("POST", TEST_URL + "/api/2.0/sql/statements/1"); + retryStrategy = RETRY_STRATEGY_PICKER.getRetryStrategy(request); + Assertions.assertInstanceOf(NonIdempotentRequestRetryStrategy.class, retryStrategy); + } +} diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/utils/FakeTimer.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/utils/FakeTimer.java index 73ba1150b..b17fce685 100644 --- a/databricks-sdk-java/src/test/java/com/databricks/sdk/core/utils/FakeTimer.java +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/core/utils/FakeTimer.java @@ -12,7 +12,7 @@ public FakeTimer(long currentTimeMillis) { } @Override - public void 
wait(int milliseconds) { + public void sleep(long milliseconds) { currentTimeMillis += milliseconds; } diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/integration/ClustersIT.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/integration/ClustersIT.java index cb482a02d..8dd25d2af 100644 --- a/databricks-sdk-java/src/test/java/com/databricks/sdk/integration/ClustersIT.java +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/integration/ClustersIT.java @@ -3,6 +3,7 @@ import static org.junit.jupiter.api.Assertions.*; import com.databricks.sdk.WorkspaceClient; +import com.databricks.sdk.core.error.platform.ResourceDoesNotExist; import com.databricks.sdk.integration.framework.CollectionUtils; import com.databricks.sdk.integration.framework.EnvContext; import com.databricks.sdk.integration.framework.EnvOrSkip; @@ -47,4 +48,13 @@ void latestRuntime(WorkspaceClient w) { assertNotNull(runtime); } + + @Test + void clusterDoesNotExist(WorkspaceClient w) { + assertThrowsExactly( + ResourceDoesNotExist.class, + () -> { + w.clusters().get("does-not-exist"); + }); + } } diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/integration/FilesIT.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/integration/FilesIT.java index b36641415..bca00213a 100644 --- a/databricks-sdk-java/src/test/java/com/databricks/sdk/integration/FilesIT.java +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/integration/FilesIT.java @@ -28,7 +28,7 @@ void uploadAndDownloadFile(WorkspaceClient workspace) throws IOException { workspace, (volumePath) -> { // Generate a random file name and random contents of 10 KiB. 
- String fileName = NameUtils.uniqueName(volumePath + "/test"); + String fileName = NameUtils.uniqueName(volumePath + "/test-with-?-and-#"); byte[] fileContents = new byte[1024 * 10]; for (int i = 0; i < fileContents.length; i++) { fileContents[i] = (byte) (i & 0xFF); diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/integration/JobsIT.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/integration/JobsIT.java index 391a480c3..61cf3f30b 100644 --- a/databricks-sdk-java/src/test/java/com/databricks/sdk/integration/JobsIT.java +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/integration/JobsIT.java @@ -1,6 +1,9 @@ package com.databricks.sdk.integration; +import static org.junit.jupiter.api.Assertions.*; + import com.databricks.sdk.WorkspaceClient; +import com.databricks.sdk.core.error.platform.ResourceDoesNotExist; import com.databricks.sdk.integration.framework.CollectionUtils; import com.databricks.sdk.integration.framework.EnvContext; import com.databricks.sdk.integration.framework.EnvTest; @@ -20,4 +23,13 @@ void listsJobs(WorkspaceClient w) { CollectionUtils.assertUnique(all); } + + @Test + void getNonExistingJob(WorkspaceClient w) { + assertThrowsExactly( + ResourceDoesNotExist.class, + () -> { + w.jobs().get(123456789); + }); + } } diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/integration/PrivateAccessIT.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/integration/PrivateAccessIT.java index 41160714c..adbcaae3a 100644 --- a/databricks-sdk-java/src/test/java/com/databricks/sdk/integration/PrivateAccessIT.java +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/integration/PrivateAccessIT.java @@ -11,6 +11,8 @@ @EnvContext("account") @DisabledIfEnvironmentVariable(named = "ARM_CLIENT_ID", matches = ".*") +// VPC Endpoints need to be enabled in our GCP E2 account. 
+@DisabledIfEnvironmentVariable(named = "GOOGLE_CREDENTIALS", matches = ".*") @ExtendWith(EnvTest.class) public class PrivateAccessIT { @Test diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/integration/QueriesIT.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/integration/QueriesIT.java index 2a9ac427e..d57adaa46 100644 --- a/databricks-sdk-java/src/test/java/com/databricks/sdk/integration/QueriesIT.java +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/integration/QueriesIT.java @@ -14,7 +14,7 @@ class QueriesIT { @Test void listsQueries(WorkspaceClient w) { - Iterable list = w.queries().list(new ListQueriesRequest().setPageSize(2L)); + Iterable list = w.queries().list(new ListQueriesRequest().setPageSize(1000L)); java.util.List all = CollectionUtils.asList(list); diff --git a/databricks-sdk-java/src/test/java/com/databricks/sdk/integration/VpcEndpointsIT.java b/databricks-sdk-java/src/test/java/com/databricks/sdk/integration/VpcEndpointsIT.java index bd7622126..411e2383e 100644 --- a/databricks-sdk-java/src/test/java/com/databricks/sdk/integration/VpcEndpointsIT.java +++ b/databricks-sdk-java/src/test/java/com/databricks/sdk/integration/VpcEndpointsIT.java @@ -11,6 +11,8 @@ @EnvContext("account") @DisabledIfEnvironmentVariable(named = "ARM_CLIENT_ID", matches = ".*") +// VPC Endpoints need to be enabled in our GCP E2 account. 
+@DisabledIfEnvironmentVariable(named = "GOOGLE_CREDENTIALS", matches = ".*") @ExtendWith(EnvTest.class) public class VpcEndpointsIT { @Test diff --git a/examples/docs/pom.xml b/examples/docs/pom.xml index 43e650ca8..6408b1fdc 100644 --- a/examples/docs/pom.xml +++ b/examples/docs/pom.xml @@ -24,7 +24,7 @@ com.databricks databricks-sdk-java - 0.19.0 + 0.23.0 diff --git a/examples/docs/src/main/java/com/databricks/example/HttpProxyExample.java b/examples/docs/src/main/java/com/databricks/example/HttpProxyExample.java new file mode 100644 index 000000000..8ab8a2360 --- /dev/null +++ b/examples/docs/src/main/java/com/databricks/example/HttpProxyExample.java @@ -0,0 +1,36 @@ +package com.databricks.example; + +import com.databricks.sdk.WorkspaceClient; +import com.databricks.sdk.service.compute.ClusterDetails; +import com.databricks.sdk.service.compute.ListClustersRequest; + +/** + * This example demonstrates how to use the Databricks Java SDK with an HTTP proxy. + * + * To run this example, you must set the following system properties: + * -Dhttps.proxyHost= + * The host name of the HTTP proxy server. + * -Dhttps.proxyPort= + * The port number of the HTTP proxy server. 
+ */ +class HttpProxyExample { + public static void main(String[] args) { + validateProxySettings(); + WorkspaceClient w = new WorkspaceClient(); + + for (ClusterDetails c : w.clusters().list(new ListClustersRequest())) { + System.out.println(c.getClusterName()); + } + } + + private static void validateProxySettings() { + String httpProxyHost = System.getProperty("https.proxyHost"); + String httpProxyPort = System.getProperty("https.proxyPort"); + if (httpProxyHost == null) { + throw new IllegalArgumentException("https.proxyHost must be set"); + } + if (httpProxyPort == null) { + throw new IllegalArgumentException("https.proxyPort must be set"); + } + } +} diff --git a/examples/docs/src/main/java/com/databricks/example/M2MAuthExample.java b/examples/docs/src/main/java/com/databricks/example/M2MAuthExample.java new file mode 100644 index 000000000..60f3e8e9c --- /dev/null +++ b/examples/docs/src/main/java/com/databricks/example/M2MAuthExample.java @@ -0,0 +1,52 @@ +package com.databricks.example; + +import com.databricks.sdk.AccountClient; +import com.databricks.sdk.WorkspaceClient; +import com.databricks.sdk.core.DatabricksConfig; +import com.databricks.sdk.service.compute.ClusterDetails; +import com.databricks.sdk.service.compute.ListClustersRequest; +import com.databricks.sdk.service.provisioning.Workspace; + +/** + Example for authenticating with Databricks Account through CLI. + The authentication type can be set to either "databricks-cli" or "azure-cli". + For details on authenticating via bricks cli, please see: ... + */ +public class M2MAuthExample { + /** + Get config used for authenticating with Databricks. + @return DatabricksConfig object used for authentication + */ + private static DatabricksConfig getConfig() { + return new DatabricksConfig() + .setHost("https://accounts.cloud.databricks.com") + // Fill in your E2 account ID. Click on your username in the top-right corner of the accounts console to + // display your account ID. 
+ .setAccountId("") + // Create a service principal in the Account console at "User Management" -> "Service Principals" and + // click "Create service principal". Generate a secret and paste the client ID and secret below. + .setClientId("") + .setClientSecret(""); + } + + /** + Authenticate and retrieve the list of workspaces from account + */ + public static void main(String[] args) { + DatabricksConfig config = getConfig(); + + AccountClient account = new AccountClient(config); + Workspace firstWorkspace = null; + for (Workspace w : account.workspaces().list()) { + if (w.getDeploymentName().equals("dbc-a39a1eb1-ef95")) { + firstWorkspace = w; + } + System.out.println(w.getWorkspaceName()); + } + + WorkspaceClient w = account.getWorkspaceClient(firstWorkspace); + for (ClusterDetails c : w.clusters().list(new ListClustersRequest())) { + System.out.println(c.getClusterName()); + } + } +} diff --git a/examples/spring-boot-oauth-u2m-demo/pom.xml b/examples/spring-boot-oauth-u2m-demo/pom.xml index cd1a47832..c04588df0 100644 --- a/examples/spring-boot-oauth-u2m-demo/pom.xml +++ b/examples/spring-boot-oauth-u2m-demo/pom.xml @@ -37,7 +37,7 @@ com.databricks databricks-sdk-java - 0.19.0 + 0.23.0 com.fasterxml.jackson.datatype diff --git a/pom.xml b/pom.xml index c5c323635..03a30e865 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ 4.0.0 com.databricks databricks-sdk-parent - 0.19.0 + 0.23.0 pom Databricks SDK for Java The Databricks SDK for Java includes functionality to accelerate development with Java for