diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha
index 4ceeab3d..ffd6f58d 100644
--- a/.codegen/_openapi_sha
+++ b/.codegen/_openapi_sha
@@ -1 +1 @@
-d05898328669a3f8ab0c2ecee37db2673d3ea3f7
\ No newline at end of file
+6f6b1371e640f2dfeba72d365ac566368656f6b6
\ No newline at end of file
diff --git a/.gitattributes b/.gitattributes
index 8c0250f3..94c51cb9 100755
--- a/.gitattributes
+++ b/.gitattributes
@@ -36,15 +36,16 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppPermissionL
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppPermissions.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppPermissionsDescription.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppPermissionsRequest.java linguist-generated=true
-databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppState.java linguist-generated=true
-databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppStatus.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ApplicationState.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ApplicationStatus.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsAPI.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsImpl.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsService.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ComputeState.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ComputeStatus.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppDeploymentRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/DeleteAppRequest.java linguist-generated=true
-databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/DeleteResponse.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppDeploymentRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppPermissionLevelsRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/GetAppPermissionLevelsResponse.java linguist-generated=true
@@ -56,7 +57,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ListAppsReques
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ListAppsResponse.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/StartAppRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/StopAppRequest.java linguist-generated=true
-databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/StopAppResponse.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/UpdateAppRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ActionConfiguration.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/ActionConfigurationType.java linguist-generated=true
@@ -142,11 +142,13 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactAll
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactMatcher.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ArtifactType.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AssignResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsCredentials.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRoleRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsIamRoleResponse.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentityRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManagedIdentityResponse.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureServicePrincipal.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureUserDelegationSas.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CancelRefreshRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CancelRefreshResponse.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogInfo.java linguist-generated=true
@@ -245,6 +247,9 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionPar
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsAPI.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsImpl.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/FunctionsService.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GcpOauthToken.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryTableCredentialRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryTableCredentialResponse.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountMetastoreAssignmentRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountMetastoreRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountStorageCredentialRequest.java linguist-generated=true
@@ -360,6 +365,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/QualityMoni
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/QualityMonitorsImpl.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/QualityMonitorsService.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/QuotaInfo.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/R2Credentials.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ReadVolumeRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegenerateDashboardRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegenerateDashboardResponse.java linguist-generated=true
@@ -397,12 +403,16 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableConstr
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableDependency.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableExistsResponse.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableInfo.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableOperation.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableRowFilter.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableSummary.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableType.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesAPI.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesImpl.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TablesService.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsAPI.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsImpl.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsService.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TriggeredUpdateStatus.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UnassignRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UnassignResponse.java linguist-generated=true
@@ -1607,6 +1617,16 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Worksp
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesImpl.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesService.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/Ai21LabsConfig.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayConfig.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayGuardrailParameters.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayGuardrailPiiBehavior.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayGuardrailPiiBehaviorBehavior.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayGuardrails.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayInferenceTableConfig.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayRateLimit.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayRateLimitKey.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayRateLimitRenewalPeriod.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayUsageTrackingConfig.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AmazonBedrockConfig.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AmazonBedrockConfigBedrockProvider.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AnthropicConfig.java linguist-generated=true
@@ -1653,6 +1673,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/OpenAiConfi
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PaLmConfig.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PatchServingEndpointTags.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PayloadTable.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutAiGatewayRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutAiGatewayResponse.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/PutResponse.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/QueryEndpointInput.java linguist-generated=true
@@ -1701,6 +1723,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AutomaticC
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AutomaticClusterUpdateImpl.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AutomaticClusterUpdateService.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AutomaticClusterUpdateSetting.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/BooleanMessage.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ClusterAutoRestartMessage.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ClusterAutoRestartMessageEnablementDetails.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/ClusterAutoRestartMessageMaintenanceWindow.java linguist-generated=true
@@ -1740,6 +1763,10 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DefaultNam
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteAccountIpAccessListRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDefaultNamespaceSettingRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDefaultNamespaceSettingResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyAccessRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyAccessResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyFeaturesRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyFeaturesResponse.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteIpAccessListRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNetworkConnectivityConfigurationRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteNetworkConnectivityConfigurationResponse.java linguist-generated=true
@@ -1752,6 +1779,14 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteRest
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteRestrictWorkspaceAdminsSettingResponse.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteTokenManagementRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DestinationType.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyAccess.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyAccessAPI.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyAccessImpl.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyAccessService.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyFeatures.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyFeaturesAPI.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyFeaturesImpl.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyFeaturesService.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EmailConfig.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/Empty.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/EnhancedSecurityMonitoring.java linguist-generated=true
@@ -1774,6 +1809,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetAutomat
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetComplianceSecurityProfileSettingRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetCspEnablementAccountSettingRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDefaultNamespaceSettingRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDisableLegacyAccessRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDisableLegacyFeaturesRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetEnhancedSecurityMonitoringSettingRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetEsmEnablementAccountSettingRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetIpAccessListRequest.java linguist-generated=true
@@ -1865,6 +1902,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateAuto
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateComplianceSecurityProfileSettingRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateCspEnablementAccountSettingRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDefaultNamespaceSettingRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyAccessRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyFeaturesRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEnhancedSecurityMonitoringSettingRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateEsmEnablementAccountSettingRequest.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateIpAccessList.java linguist-generated=true
@@ -1982,7 +2021,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CancelExecution
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Channel.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ChannelInfo.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ChannelName.java linguist-generated=true
-databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ClientCallContext.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ColumnInfo.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ColumnInfoTypeName.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/CreateAlert.java linguist-generated=true
@@ -2035,8 +2073,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EditWarehouseRe
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EditWarehouseRequestWarehouseType.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EditWarehouseResponse.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/Empty.java linguist-generated=true
-databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EncodedText.java linguist-generated=true
-databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EncodedTextEncoding.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointConfPair.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointHealth.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EndpointInfo.java linguist-generated=true
@@ -2115,11 +2151,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryMetrics.ja
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryOptions.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryParameter.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryPostContent.java linguist-generated=true
-databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QuerySource.java linguist-generated=true
-databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QuerySourceDriverInfo.java linguist-generated=true
-databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QuerySourceEntryPoint.java linguist-generated=true
-databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QuerySourceJobManager.java linguist-generated=true
-databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QuerySourceTrigger.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryStatementType.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryStatus.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryVisualizationsAPI.java linguist-generated=true
@@ -2137,7 +2168,6 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ResultManifest.
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ResultSchema.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RunAsMode.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/RunAsRole.java linguist-generated=true
-databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ServerlessChannelInfo.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ServiceError.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ServiceErrorCode.java linguist-generated=true
 databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/SetRequest.java linguist-generated=true
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java
index 8bdf7119..c9a11f3f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java
@@ -44,6 +44,8 @@ import com.databricks.sdk.service.catalog.TableConstraintsService;
 import com.databricks.sdk.service.catalog.TablesAPI;
 import com.databricks.sdk.service.catalog.TablesService;
+import com.databricks.sdk.service.catalog.TemporaryTableCredentialsAPI;
+import com.databricks.sdk.service.catalog.TemporaryTableCredentialsService;
 import com.databricks.sdk.service.catalog.VolumesAPI;
 import com.databricks.sdk.service.catalog.VolumesService;
 import com.databricks.sdk.service.catalog.WorkspaceBindingsAPI;
@@ -270,6 +272,7 @@ public class WorkspaceClient {
   private SystemSchemasAPI systemSchemasAPI;
   private TableConstraintsAPI tableConstraintsAPI;
   private TablesAPI tablesAPI;
+  private TemporaryTableCredentialsAPI temporaryTableCredentialsAPI;
   private TokenManagementAPI tokenManagementAPI;
   private TokensAPI tokensAPI;
   private UsersAPI usersAPI;
@@ -368,6 +371,7 @@ public WorkspaceClient(DatabricksConfig config) {
     systemSchemasAPI = new SystemSchemasAPI(apiClient);
     tableConstraintsAPI = new TableConstraintsAPI(apiClient);
     tablesAPI = new TablesAPI(apiClient);
+    temporaryTableCredentialsAPI = new TemporaryTableCredentialsAPI(apiClient);
     tokenManagementAPI = new TokenManagementAPI(apiClient);
     tokensAPI = new TokensAPI(apiClient);
     usersAPI = new UsersAPI(apiClient);
@@ -1451,7 +1455,9 @@ public SharesAPI shares() {
 * timeouts are approximate, occur server-side, and cannot account for things such as caller
 * delays and network latency from caller to service. - The system will auto-close a statement
 * after one hour if the client stops polling and thus you must poll at least once an hour. - The
- * results are only available for one hour after success; polling does not extend this.
+ * results are only available for one hour after success; polling does not extend this. - The SQL
+ * Execution API must be used for the entire lifecycle of the statement. For example, you cannot
+ * use the Jobs API to execute the command, and then the SQL Execution API to cancel it.
 *
* <p>[Apache Arrow Columnar]: https://arrow.apache.org/overview/ [Databricks SQL Statement
* Execution API tutorial]: https://docs.databricks.com/sql/api/sql-execution-tutorial.html
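
The lifecycle rule above in practice, as a rough sketch: execute, poll, and cancel all through the same API. This assumes a `WorkspaceClient w` in scope, a placeholder warehouse id, and the generated `com.databricks.sdk.service.sql` classes; error handling is elided.

```java
// Execute, then poll through the SQL Statement Execution API only.
String id =
    w.statementExecution()
        .executeStatement(
            new ExecuteStatementRequest()
                .setWarehouseId("<warehouse-id>") // placeholder
                .setStatement("SELECT 1"))
        .getStatementId();
GetStatementResponse res =
    w.statementExecution().getStatement(new GetStatementRequest().setStatementId(id));
while (res.getStatus().getState() == StatementState.PENDING
    || res.getStatus().getState() == StatementState.RUNNING) {
  Thread.sleep(5_000); // poll well within the one-hour auto-close window
  res = w.statementExecution().getStatement(new GetStatementRequest().setStatementId(id));
}
// A cancel, if needed, must also go through this API, e.g.
// w.statementExecution().cancelExecution(new CancelExecutionRequest().setStatementId(id));
```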
@@ -1517,6 +1523,25 @@ public TablesAPI tables() {
return tablesAPI;
}
+ /**
+ * Temporary Table Credentials refer to short-lived, downscoped credentials used to access cloud
+ * storage locations where table data is stored in Databricks. These credentials are employed to
+ * provide secure and time-limited access to data in cloud environments such as AWS, Azure, and
+ * Google Cloud. Each cloud provider has its own type of credentials: AWS uses temporary session
+ * tokens via AWS Security Token Service (STS), Azure utilizes Shared Access Signatures (SAS) for
+ * its data storage services, and Google Cloud supports temporary credentials through OAuth 2.0.
+ * Temporary table credentials ensure that data access is limited in scope and duration, reducing
+ * the risk of unauthorized access or misuse. To use the temporary table credentials API, a
+ * metastore admin needs to enable the external_access_enabled flag (off by default) at the
+ * metastore level, and a user needs to be granted the EXTERNAL USE SCHEMA permission at the
+ * schema level by a catalog admin. Note that EXTERNAL USE SCHEMA is a schema-level permission
+ * that can only be granted explicitly by a catalog admin and is not included in schema ownership
+ * or ALL PRIVILEGES on the schema, for security reasons.
+ */
+ public TemporaryTableCredentialsAPI temporaryTableCredentials() {
+ return temporaryTableCredentialsAPI;
+ }
+
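
A minimal sketch of the API this accessor exposes, assuming a `WorkspaceClient w` and a table UUID `tableId` in scope, and assuming the generated method is named `generateTemporaryTableCredentials`:

```java
GenerateTemporaryTableCredentialResponse cred =
    w.temporaryTableCredentials()
        .generateTemporaryTableCredentials(
            new GenerateTemporaryTableCredentialRequest()
                .setTableId(tableId)
                .setOperation(TableOperation.READ)); // READ or READ_WRITE
// Cache and reuse the credential until the server-side expiry (epoch milliseconds).
long expiresAt = cred.getExpirationTime();
```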
/**
* Enables administrators to get all tokens and delete tokens for other users. Admins can either
* get every token, get a specific token by ID, or get all tokens for a particular user.
@@ -2531,6 +2556,20 @@ public WorkspaceClient withTablesAPI(TablesAPI tables) {
return this;
}
+ /** Replace the default TemporaryTableCredentialsService with a custom implementation. */
+ public WorkspaceClient withTemporaryTableCredentialsImpl(
+ TemporaryTableCredentialsService temporaryTableCredentials) {
+ return this.withTemporaryTableCredentialsAPI(
+ new TemporaryTableCredentialsAPI(temporaryTableCredentials));
+ }
+
+ /** Replace the default TemporaryTableCredentialsAPI with a custom implementation. */
+ public WorkspaceClient withTemporaryTableCredentialsAPI(
+ TemporaryTableCredentialsAPI temporaryTableCredentials) {
+ this.temporaryTableCredentialsAPI = temporaryTableCredentials;
+ return this;
+ }
+
/** Replace the default TokenManagementService with a custom implementation. */
public WorkspaceClient withTokenManagementImpl(TokenManagementService tokenManagement) {
return this.withTokenManagementAPI(new TokenManagementAPI(tokenManagement));
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/App.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/App.java
index 16e2dec4..77357102 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/App.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/App.java
@@ -13,6 +13,14 @@ public class App {
@JsonProperty("active_deployment")
private AppDeployment activeDeployment;
+ /** */
+ @JsonProperty("app_status")
+ private ApplicationStatus appStatus;
+
+ /** */
+ @JsonProperty("compute_status")
+ private ComputeStatus computeStatus;
+
/** The creation time of the app. Formatted timestamp in ISO 8601. */
@JsonProperty("create_time")
private String createTime;
@@ -44,10 +52,6 @@ public class App {
@JsonProperty("service_principal_name")
private String servicePrincipalName;
- /** */
- @JsonProperty("status")
- private AppStatus status;
-
/** The update time of the app. Formatted timestamp in ISO 8601. */
@JsonProperty("update_time")
private String updateTime;
@@ -69,6 +73,24 @@ public AppDeployment getActiveDeployment() {
return activeDeployment;
}
+ public App setAppStatus(ApplicationStatus appStatus) {
+ this.appStatus = appStatus;
+ return this;
+ }
+
+ public ApplicationStatus getAppStatus() {
+ return appStatus;
+ }
+
+ public App setComputeStatus(ComputeStatus computeStatus) {
+ this.computeStatus = computeStatus;
+ return this;
+ }
+
+ public ComputeStatus getComputeStatus() {
+ return computeStatus;
+ }
+
public App setCreateTime(String createTime) {
this.createTime = createTime;
return this;
@@ -132,15 +154,6 @@ public String getServicePrincipalName() {
return servicePrincipalName;
}
- public App setStatus(AppStatus status) {
- this.status = status;
- return this;
- }
-
- public AppStatus getStatus() {
- return status;
- }
-
public App setUpdateTime(String updateTime) {
this.updateTime = updateTime;
return this;
@@ -174,6 +187,8 @@ public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) return false;
App that = (App) o;
return Objects.equals(activeDeployment, that.activeDeployment)
+ && Objects.equals(appStatus, that.appStatus)
+ && Objects.equals(computeStatus, that.computeStatus)
&& Objects.equals(createTime, that.createTime)
&& Objects.equals(creator, that.creator)
&& Objects.equals(description, that.description)
@@ -181,7 +196,6 @@ public boolean equals(Object o) {
&& Objects.equals(pendingDeployment, that.pendingDeployment)
&& Objects.equals(servicePrincipalId, that.servicePrincipalId)
&& Objects.equals(servicePrincipalName, that.servicePrincipalName)
- && Objects.equals(status, that.status)
&& Objects.equals(updateTime, that.updateTime)
&& Objects.equals(updater, that.updater)
&& Objects.equals(url, that.url);
@@ -191,6 +205,8 @@ public boolean equals(Object o) {
public int hashCode() {
return Objects.hash(
activeDeployment,
+ appStatus,
+ computeStatus,
createTime,
creator,
description,
@@ -198,7 +214,6 @@ public int hashCode() {
pendingDeployment,
servicePrincipalId,
servicePrincipalName,
- status,
updateTime,
updater,
url);
@@ -208,6 +223,8 @@ public int hashCode() {
public String toString() {
return new ToStringer(App.class)
.add("activeDeployment", activeDeployment)
+ .add("appStatus", appStatus)
+ .add("computeStatus", computeStatus)
.add("createTime", createTime)
.add("creator", creator)
.add("description", description)
@@ -215,7 +232,6 @@ public String toString() {
.add("pendingDeployment", pendingDeployment)
.add("servicePrincipalId", servicePrincipalId)
.add("servicePrincipalName", servicePrincipalName)
- .add("status", status)
.add("updateTime", updateTime)
.add("updater", updater)
.add("url", url)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppDeploymentState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppDeploymentState.java
index 8142ad13..3c42359c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppDeploymentState.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppDeploymentState.java
@@ -6,8 +6,8 @@
@Generated
public enum AppDeploymentState {
+ CANCELLED,
FAILED,
IN_PROGRESS,
- STOPPED,
SUCCEEDED,
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EncodedTextEncoding.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ApplicationState.java
similarity index 50%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EncodedTextEncoding.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ApplicationState.java
index 37eb769a..055c9457 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EncodedTextEncoding.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ApplicationState.java
@@ -1,12 +1,13 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-package com.databricks.sdk.service.sql;
+package com.databricks.sdk.service.apps;
import com.databricks.sdk.support.Generated;
-/** Carry text data in different form. */
@Generated
-public enum EncodedTextEncoding {
- BASE64,
- PLAIN,
+public enum ApplicationState {
+ CRASHED,
+ DEPLOYING,
+ RUNNING,
+ UNAVAILABLE,
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ApplicationStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ApplicationStatus.java
new file mode 100755
index 00000000..e86a89b1
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ApplicationStatus.java
@@ -0,0 +1,58 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.apps;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class ApplicationStatus {
+ /** Application status message */
+ @JsonProperty("message")
+ private String message;
+
+ /** State of the application. */
+ @JsonProperty("state")
+ private ApplicationState state;
+
+ public ApplicationStatus setMessage(String message) {
+ this.message = message;
+ return this;
+ }
+
+ public String getMessage() {
+ return message;
+ }
+
+ public ApplicationStatus setState(ApplicationState state) {
+ this.state = state;
+ return this;
+ }
+
+ public ApplicationState getState() {
+ return state;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ApplicationStatus that = (ApplicationStatus) o;
+ return Objects.equals(message, that.message) && Objects.equals(state, that.state);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(message, state);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ApplicationStatus.class)
+ .add("message", message)
+ .add("state", state)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsAPI.java
index 04ebf9ff..35d1b609 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsAPI.java
@@ -33,23 +33,71 @@ public AppsAPI(AppsService mock) {
impl = mock;
}
- public App waitGetAppIdle(String name) throws TimeoutException {
- return waitGetAppIdle(name, Duration.ofMinutes(20), null);
+ public App waitGetAppActive(String name) throws TimeoutException {
+ return waitGetAppActive(name, Duration.ofMinutes(20), null);
}
- public App waitGetAppIdle(String name, Duration timeout, Consumer<App> callback)
+ public App waitGetAppActive(String name, Duration timeout, Consumer<App> callback)
/**
 * Delete an app.
 *
 * <p>Deletes an app.
 */
- public void delete(DeleteAppRequest request) {
- impl.delete(request);
+ public App delete(DeleteAppRequest request) {
+ return impl.delete(request);
}
- public Wait<AppDeployment, AppDeployment> start(StartAppRequest request) {
/**
 * Start an app.
 *
 * <p>Start the last active deployment of the app in the workspace.
 */
+ public Wait<App, App> start(StartAppRequest request) {
/**
 * Stop an app.
 *
 * <p>Stops the active deployment of the app in the workspace.
 */
- public void stop(StopAppRequest request) {
- impl.stop(request);
+ public Wait<App, App> stop(StopAppRequest request) {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsService.java
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppsService.java
/**
 * Delete an app.
 *
 * <p>Deletes an app.
 */
- void delete(DeleteAppRequest deleteAppRequest);
+ App delete(DeleteAppRequest deleteAppRequest);
/**
* Create an app deployment.
@@ -89,14 +89,14 @@ GetAppPermissionLevelsResponse getPermissionLevels(
*
* <p>Start the last active deployment of the app in the workspace.
*/
- AppDeployment start(StartAppRequest startAppRequest);
+ App start(StartAppRequest startAppRequest);
/**
* Stop an app.
*
* <p>Stops the active deployment of the app in the workspace.
*/
- void stop(StopAppRequest stopAppRequest);
+ App stop(StopAppRequest stopAppRequest);
/**
* Update an app.
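
Taken together, the signature changes above mean `start` and `stop` now resolve to an App rather than an AppDeployment or nothing. A sketch assuming a `WorkspaceClient w` in scope and the SDK's Wait support class:

```java
// Block until the app reaches a terminal state after each lifecycle call.
App started =
    w.apps().start(new StartAppRequest().setName("my-app")).get(Duration.ofMinutes(20));
App stopped =
    w.apps().stop(new StopAppRequest().setName("my-app")).get(Duration.ofMinutes(20));
```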
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ComputeState.java
similarity index 74%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppState.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ComputeState.java
index a0606e01..e3622244 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppState.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ComputeState.java
@@ -5,12 +5,12 @@
import com.databricks.sdk.support.Generated;
@Generated
-public enum AppState {
- CREATING,
- DELETED,
+public enum ComputeState {
+ ACTIVE,
DELETING,
ERROR,
- IDLE,
- RUNNING,
STARTING,
+ STOPPED,
+ STOPPING,
+ UPDATING,
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ComputeStatus.java
similarity index 67%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppStatus.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ComputeStatus.java
index e6b52074..0af20eb5 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/AppStatus.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/ComputeStatus.java
@@ -8,16 +8,16 @@
import java.util.Objects;
@Generated
-public class AppStatus {
- /** Message corresponding with the app state. */
+public class ComputeStatus {
+ /** Compute status message */
@JsonProperty("message")
private String message;
- /** State of the app. */
+ /** State of the app compute. */
@JsonProperty("state")
- private AppState state;
+ private ComputeState state;
- public AppStatus setMessage(String message) {
+ public ComputeStatus setMessage(String message) {
this.message = message;
return this;
}
@@ -26,12 +26,12 @@ public String getMessage() {
return message;
}
- public AppStatus setState(AppState state) {
+ public ComputeStatus setState(ComputeState state) {
this.state = state;
return this;
}
- public AppState getState() {
+ public ComputeState getState() {
return state;
}
@@ -39,7 +39,7 @@ public AppState getState() {
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
- AppStatus that = (AppStatus) o;
+ ComputeStatus that = (ComputeStatus) o;
return Objects.equals(message, that.message) && Objects.equals(state, that.state);
}
@@ -50,6 +50,9 @@ public int hashCode() {
@Override
public String toString() {
- return new ToStringer(AppStatus.class).add("message", message).add("state", state).toString();
+ return new ToStringer(ComputeStatus.class)
+ .add("message", message)
+ .add("state", state)
+ .toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppDeploymentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppDeploymentRequest.java
index 98d03251..3952d58b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppDeploymentRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/CreateAppDeploymentRequest.java
@@ -13,6 +13,10 @@ public class CreateAppDeploymentRequest {
/** The name of the app. */
@JsonIgnore private String appName;
+ /** The unique id of the deployment. */
+ @JsonProperty("deployment_id")
+ private String deploymentId;
+
/** The mode in which the deployment will manage the source code. */
@JsonProperty("mode")
private AppDeploymentMode mode;
@@ -36,6 +40,15 @@ public String getAppName() {
return appName;
}
+ public CreateAppDeploymentRequest setDeploymentId(String deploymentId) {
+ this.deploymentId = deploymentId;
+ return this;
+ }
+
+ public String getDeploymentId() {
+ return deploymentId;
+ }
+
public CreateAppDeploymentRequest setMode(AppDeploymentMode mode) {
this.mode = mode;
return this;
@@ -60,19 +73,21 @@ public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) return false;
CreateAppDeploymentRequest that = (CreateAppDeploymentRequest) o;
return Objects.equals(appName, that.appName)
+ && Objects.equals(deploymentId, that.deploymentId)
&& Objects.equals(mode, that.mode)
&& Objects.equals(sourceCodePath, that.sourceCodePath);
}
@Override
public int hashCode() {
- return Objects.hash(appName, mode, sourceCodePath);
+ return Objects.hash(appName, deploymentId, mode, sourceCodePath);
}
@Override
public String toString() {
return new ToStringer(CreateAppDeploymentRequest.class)
.add("appName", appName)
+ .add("deploymentId", deploymentId)
.add("mode", mode)
.add("sourceCodePath", sourceCodePath)
.toString();
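
A hedged example of the new field, assuming a `WorkspaceClient w` in scope and that `deploy` is the generated create-deployment method: callers may now supply their own deployment id.

```java
AppDeployment deployment =
    w.apps()
        .deploy(
            new CreateAppDeploymentRequest()
                .setAppName("my-app") // illustrative values
                .setDeploymentId(java.util.UUID.randomUUID().toString())
                .setSourceCodePath("/Workspace/Users/me@example.com/my-app"))
        .get();
```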
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/DeleteResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/DeleteResponse.java
deleted file mode 100755
index 50d230ef..00000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/DeleteResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.apps;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class DeleteResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(DeleteResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/StopAppResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/StopAppResponse.java
deleted file mode 100755
index 9e862a01..00000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/apps/StopAppResponse.java
+++ /dev/null
@@ -1,28 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.apps;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import java.util.Objects;
-
-@Generated
-public class StopAppResponse {
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- return true;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash();
- }
-
- @Override
- public String toString() {
- return new ToStringer(StopAppResponse.class).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsCredentials.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsCredentials.java
new file mode 100755
index 00000000..84389456
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AwsCredentials.java
@@ -0,0 +1,96 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/**
+ * AWS temporary credentials for API authentication. Read more at
+ * https://docs.aws.amazon.com/STS/latest/APIReference/API_Credentials.html.
+ */
+@Generated
+public class AwsCredentials {
+ /** The access key ID that identifies the temporary credentials. */
+ @JsonProperty("access_key_id")
+ private String accessKeyId;
+
+ /**
+ * The Amazon Resource Name (ARN) of the S3 access point for temporary credentials related to
+ * the external location.
+ */
+ @JsonProperty("access_point")
+ private String accessPoint;
+
+ /** The secret access key that can be used to sign AWS API requests. */
+ @JsonProperty("secret_access_key")
+ private String secretAccessKey;
+
+ /** The token that users must pass to the AWS API to use the temporary credentials. */
+ @JsonProperty("session_token")
+ private String sessionToken;
+
+ public AwsCredentials setAccessKeyId(String accessKeyId) {
+ this.accessKeyId = accessKeyId;
+ return this;
+ }
+
+ public String getAccessKeyId() {
+ return accessKeyId;
+ }
+
+ public AwsCredentials setAccessPoint(String accessPoint) {
+ this.accessPoint = accessPoint;
+ return this;
+ }
+
+ public String getAccessPoint() {
+ return accessPoint;
+ }
+
+ public AwsCredentials setSecretAccessKey(String secretAccessKey) {
+ this.secretAccessKey = secretAccessKey;
+ return this;
+ }
+
+ public String getSecretAccessKey() {
+ return secretAccessKey;
+ }
+
+ public AwsCredentials setSessionToken(String sessionToken) {
+ this.sessionToken = sessionToken;
+ return this;
+ }
+
+ public String getSessionToken() {
+ return sessionToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ AwsCredentials that = (AwsCredentials) o;
+ return Objects.equals(accessKeyId, that.accessKeyId)
+ && Objects.equals(accessPoint, that.accessPoint)
+ && Objects.equals(secretAccessKey, that.secretAccessKey)
+ && Objects.equals(sessionToken, that.sessionToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(accessKeyId, accessPoint, secretAccessKey, sessionToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(AwsCredentials.class)
+ .add("accessKeyId", accessKeyId)
+ .add("accessPoint", accessPoint)
+ .add("secretAccessKey", secretAccessKey)
+ .add("sessionToken", sessionToken)
+ .toString();
+ }
+}
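
One way to consume these fields, sketched with the AWS SDK v2 (an assumption for illustration, not part of this change). Here `cred` is a GenerateTemporaryTableCredentialResponse, and names are fully qualified to avoid the clash with the AWS SDK's own AwsCredentials type:

```java
com.databricks.sdk.service.catalog.AwsCredentials c = cred.getAwsTempCredentials();
software.amazon.awssdk.services.s3.S3Client s3 =
    software.amazon.awssdk.services.s3.S3Client.builder()
        .credentialsProvider(
            software.amazon.awssdk.auth.credentials.StaticCredentialsProvider.create(
                software.amazon.awssdk.auth.credentials.AwsSessionCredentials.create(
                    c.getAccessKeyId(), c.getSecretAccessKey(), c.getSessionToken())))
        .build();
```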
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureUserDelegationSas.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureUserDelegationSas.java
new file mode 100755
index 00000000..e3db75df
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureUserDelegationSas.java
@@ -0,0 +1,46 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/**
+ * Azure temporary credentials for API authentication. Read more at
+ * https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas
+ */
+@Generated
+public class AzureUserDelegationSas {
+ /** The signed URI (SAS Token) used to access blob services for a given path */
+ @JsonProperty("sas_token")
+ private String sasToken;
+
+ public AzureUserDelegationSas setSasToken(String sasToken) {
+ this.sasToken = sasToken;
+ return this;
+ }
+
+ public String getSasToken() {
+ return sasToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ AzureUserDelegationSas that = (AzureUserDelegationSas) o;
+ return Objects.equals(sasToken, that.sasToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(sasToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(AzureUserDelegationSas.class).add("sasToken", sasToken).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GcpOauthToken.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GcpOauthToken.java
new file mode 100755
index 00000000..6955e7a2
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GcpOauthToken.java
@@ -0,0 +1,46 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/**
+ * GCP temporary credentials for API authentication. Read more at
+ * https://developers.google.com/identity/protocols/oauth2/service-account
+ */
+@Generated
+public class GcpOauthToken {
+ /** */
+ @JsonProperty("oauth_token")
+ private String oauthToken;
+
+ public GcpOauthToken setOauthToken(String oauthToken) {
+ this.oauthToken = oauthToken;
+ return this;
+ }
+
+ public String getOauthToken() {
+ return oauthToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GcpOauthToken that = (GcpOauthToken) o;
+ return Objects.equals(oauthToken, that.oauthToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(oauthToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GcpOauthToken.class).add("oauthToken", oauthToken).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryTableCredentialRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryTableCredentialRequest.java
new file mode 100755
index 00000000..250720e3
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryTableCredentialRequest.java
@@ -0,0 +1,62 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class GenerateTemporaryTableCredentialRequest {
+ /**
+ * The operation performed against the table data, either READ or READ_WRITE. If READ_WRITE is
+ * specified, the returned credentials will have write permissions; otherwise, they will be
+ * read-only.
+ */
+ @JsonProperty("operation")
+ private TableOperation operation;
+
+ /** UUID of the table to read or write. */
+ @JsonProperty("table_id")
+ private String tableId;
+
+ public GenerateTemporaryTableCredentialRequest setOperation(TableOperation operation) {
+ this.operation = operation;
+ return this;
+ }
+
+ public TableOperation getOperation() {
+ return operation;
+ }
+
+ public GenerateTemporaryTableCredentialRequest setTableId(String tableId) {
+ this.tableId = tableId;
+ return this;
+ }
+
+ public String getTableId() {
+ return tableId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GenerateTemporaryTableCredentialRequest that = (GenerateTemporaryTableCredentialRequest) o;
+ return Objects.equals(operation, that.operation) && Objects.equals(tableId, that.tableId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(operation, tableId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GenerateTemporaryTableCredentialRequest.class)
+ .add("operation", operation)
+ .add("tableId", tableId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryTableCredentialResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryTableCredentialResponse.java
new file mode 100755
index 00000000..76e7ca20
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryTableCredentialResponse.java
@@ -0,0 +1,143 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class GenerateTemporaryTableCredentialResponse {
+ /**
+ * AWS temporary credentials for API authentication. Read more at
+ * https://docs.aws.amazon.com/STS/latest/APIReference/API_Credentials.html.
+ */
+ @JsonProperty("aws_temp_credentials")
+ private AwsCredentials awsTempCredentials;
+
+ /**
+ * Azure temporary credentials for API authentication. Read more at
+ * https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas
+ */
+ @JsonProperty("azure_user_delegation_sas")
+ private AzureUserDelegationSas azureUserDelegationSas;
+
+ /**
+ * Server time when the credential will expire, in unix epoch milliseconds since January 1, 1970
+ * at 00:00:00 UTC. The API client is advised to cache the credential given this expiration time.
+ */
+ @JsonProperty("expiration_time")
+ private Long expirationTime;
+
+ /**
+ * GCP temporary credentials for API authentication. Read more at
+ * https://developers.google.com/identity/protocols/oauth2/service-account
+ */
+ @JsonProperty("gcp_oauth_token")
+ private GcpOauthToken gcpOauthToken;
+
+ /**
+ * R2 temporary credentials for API authentication. Read more at
+ * https://developers.cloudflare.com/r2/api/s3/tokens/.
+ */
+ @JsonProperty("r2_temp_credentials")
+ private R2Credentials r2TempCredentials;
+
+ /** The URL of the storage path accessible by the temporary credential. */
+ @JsonProperty("url")
+ private String url;
+
+ public GenerateTemporaryTableCredentialResponse setAwsTempCredentials(
+ AwsCredentials awsTempCredentials) {
+ this.awsTempCredentials = awsTempCredentials;
+ return this;
+ }
+
+ public AwsCredentials getAwsTempCredentials() {
+ return awsTempCredentials;
+ }
+
+ public GenerateTemporaryTableCredentialResponse setAzureUserDelegationSas(
+ AzureUserDelegationSas azureUserDelegationSas) {
+ this.azureUserDelegationSas = azureUserDelegationSas;
+ return this;
+ }
+
+ public AzureUserDelegationSas getAzureUserDelegationSas() {
+ return azureUserDelegationSas;
+ }
+
+ public GenerateTemporaryTableCredentialResponse setExpirationTime(Long expirationTime) {
+ this.expirationTime = expirationTime;
+ return this;
+ }
+
+ public Long getExpirationTime() {
+ return expirationTime;
+ }
+
+ public GenerateTemporaryTableCredentialResponse setGcpOauthToken(GcpOauthToken gcpOauthToken) {
+ this.gcpOauthToken = gcpOauthToken;
+ return this;
+ }
+
+ public GcpOauthToken getGcpOauthToken() {
+ return gcpOauthToken;
+ }
+
+ public GenerateTemporaryTableCredentialResponse setR2TempCredentials(
+ R2Credentials r2TempCredentials) {
+ this.r2TempCredentials = r2TempCredentials;
+ return this;
+ }
+
+ public R2Credentials getR2TempCredentials() {
+ return r2TempCredentials;
+ }
+
+ public GenerateTemporaryTableCredentialResponse setUrl(String url) {
+ this.url = url;
+ return this;
+ }
+
+ public String getUrl() {
+ return url;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GenerateTemporaryTableCredentialResponse that = (GenerateTemporaryTableCredentialResponse) o;
+ return Objects.equals(awsTempCredentials, that.awsTempCredentials)
+ && Objects.equals(azureUserDelegationSas, that.azureUserDelegationSas)
+ && Objects.equals(expirationTime, that.expirationTime)
+ && Objects.equals(gcpOauthToken, that.gcpOauthToken)
+ && Objects.equals(r2TempCredentials, that.r2TempCredentials)
+ && Objects.equals(url, that.url);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ awsTempCredentials,
+ azureUserDelegationSas,
+ expirationTime,
+ gcpOauthToken,
+ r2TempCredentials,
+ url);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GenerateTemporaryTableCredentialResponse.class)
+ .add("awsTempCredentials", awsTempCredentials)
+ .add("azureUserDelegationSas", azureUserDelegationSas)
+ .add("expirationTime", expirationTime)
+ .add("gcpOauthToken", gcpOauthToken)
+ .add("r2TempCredentials", r2TempCredentials)
+ .add("url", url)
+ .toString();
+ }
+}
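
For reviewers: a minimal sketch of how a client might consume this response, honoring the expiration-based caching advice in the field docs above. The helper class and method names are hypothetical and not part of this change; only the getters defined in the new response type are used.

    import com.databricks.sdk.service.catalog.GenerateTemporaryTableCredentialResponse;

    class TempCredentialHelper {
      // Treat the credential as stale one minute before the server-side expiration
      // (expiration_time is unix epoch milliseconds).
      static boolean isStillValid(GenerateTemporaryTableCredentialResponse creds) {
        return creds.getExpirationTime() != null
            && creds.getExpirationTime() - System.currentTimeMillis() > 60_000;
      }

      // Presumably only one cloud-specific credential field is populated per response.
      static String describe(GenerateTemporaryTableCredentialResponse creds) {
        if (creds.getAwsTempCredentials() != null) return "AWS STS credentials for " + creds.getUrl();
        if (creds.getAzureUserDelegationSas() != null) return "Azure user-delegation SAS for " + creds.getUrl();
        if (creds.getGcpOauthToken() != null) return "GCP OAuth token for " + creds.getUrl();
        if (creds.getR2TempCredentials() != null) return "Cloudflare R2 credentials for " + creds.getUrl();
        return "no credential populated";
      }
    }
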
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetMetastoreSummaryResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetMetastoreSummaryResponse.java
index fb84071f..34e138f1 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetMetastoreSummaryResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetMetastoreSummaryResponse.java
@@ -40,6 +40,10 @@ public class GetMetastoreSummaryResponse {
@JsonProperty("delta_sharing_scope")
private GetMetastoreSummaryResponseDeltaSharingScope deltaSharingScope;
+ /** Whether to allow non-DBR clients to directly access entities under the metastore. */
+ @JsonProperty("external_access_enabled")
+ private Boolean externalAccessEnabled;
+
/**
* Globally unique metastore ID across clouds and regions, of the form
* `cloud:region:metastore_id`.
@@ -154,6 +158,15 @@ public GetMetastoreSummaryResponseDeltaSharingScope getDeltaSharingScope() {
return deltaSharingScope;
}
+ public GetMetastoreSummaryResponse setExternalAccessEnabled(Boolean externalAccessEnabled) {
+ this.externalAccessEnabled = externalAccessEnabled;
+ return this;
+ }
+
+ public Boolean getExternalAccessEnabled() {
+ return externalAccessEnabled;
+ }
+
public GetMetastoreSummaryResponse setGlobalMetastoreId(String globalMetastoreId) {
this.globalMetastoreId = globalMetastoreId;
return this;
@@ -268,6 +281,7 @@ public boolean equals(Object o) {
deltaSharingRecipientTokenLifetimeInSeconds,
that.deltaSharingRecipientTokenLifetimeInSeconds)
&& Objects.equals(deltaSharingScope, that.deltaSharingScope)
+ && Objects.equals(externalAccessEnabled, that.externalAccessEnabled)
&& Objects.equals(globalMetastoreId, that.globalMetastoreId)
&& Objects.equals(metastoreId, that.metastoreId)
&& Objects.equals(name, that.name)
@@ -291,6 +305,7 @@ public int hashCode() {
deltaSharingOrganizationName,
deltaSharingRecipientTokenLifetimeInSeconds,
deltaSharingScope,
+ externalAccessEnabled,
globalMetastoreId,
metastoreId,
name,
@@ -316,6 +331,7 @@ public String toString() {
"deltaSharingRecipientTokenLifetimeInSeconds",
deltaSharingRecipientTokenLifetimeInSeconds)
.add("deltaSharingScope", deltaSharingScope)
+ .add("externalAccessEnabled", externalAccessEnabled)
.add("globalMetastoreId", globalMetastoreId)
.add("metastoreId", metastoreId)
.add("name", name)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetTableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetTableRequest.java
index 08650bb0..19d8e5f0 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetTableRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetTableRequest.java
@@ -27,6 +27,11 @@ public class GetTableRequest {
@QueryParam("include_delta_metadata")
private Boolean includeDeltaMetadata;
+ /** Whether to include a manifest containing capabilities the table has. */
+ @JsonIgnore
+ @QueryParam("include_manifest_capabilities")
+ private Boolean includeManifestCapabilities;
+
public GetTableRequest setFullName(String fullName) {
this.fullName = fullName;
return this;
@@ -54,6 +59,15 @@ public Boolean getIncludeDeltaMetadata() {
return includeDeltaMetadata;
}
+ public GetTableRequest setIncludeManifestCapabilities(Boolean includeManifestCapabilities) {
+ this.includeManifestCapabilities = includeManifestCapabilities;
+ return this;
+ }
+
+ public Boolean getIncludeManifestCapabilities() {
+ return includeManifestCapabilities;
+ }
+
@Override
public boolean equals(Object o) {
if (this == o) return true;
@@ -61,12 +75,13 @@ public boolean equals(Object o) {
GetTableRequest that = (GetTableRequest) o;
return Objects.equals(fullName, that.fullName)
&& Objects.equals(includeBrowse, that.includeBrowse)
- && Objects.equals(includeDeltaMetadata, that.includeDeltaMetadata);
+ && Objects.equals(includeDeltaMetadata, that.includeDeltaMetadata)
+ && Objects.equals(includeManifestCapabilities, that.includeManifestCapabilities);
}
@Override
public int hashCode() {
- return Objects.hash(fullName, includeBrowse, includeDeltaMetadata);
+ return Objects.hash(fullName, includeBrowse, includeDeltaMetadata, includeManifestCapabilities);
}
@Override
@@ -75,6 +90,7 @@ public String toString() {
.add("fullName", fullName)
.add("includeBrowse", includeBrowse)
.add("includeDeltaMetadata", includeDeltaMetadata)
+ .add("includeManifestCapabilities", includeManifestCapabilities)
.toString();
}
}
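
A usage sketch of the new include_manifest_capabilities flag (the WorkspaceClient accessor and the three-part table name are assumptions for illustration):

    import com.databricks.sdk.WorkspaceClient;
    import com.databricks.sdk.service.catalog.GetTableRequest;
    import com.databricks.sdk.service.catalog.TableInfo;

    WorkspaceClient w = new WorkspaceClient();
    TableInfo table =
        w.tables()
            .get(
                new GetTableRequest()
                    .setFullName("main.default.my_table") // hypothetical three-part name
                    .setIncludeManifestCapabilities(true)); // new flag from this change
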
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSummariesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSummariesRequest.java
index 02a13979..0aa5904f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSummariesRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListSummariesRequest.java
@@ -16,6 +16,11 @@ public class ListSummariesRequest {
@QueryParam("catalog_name")
private String catalogName;
+ /** Whether to include a manifest containing capabilities the table has. */
+ @JsonIgnore
+ @QueryParam("include_manifest_capabilities")
+ private Boolean includeManifestCapabilities;
+
/**
* Maximum number of summaries for tables to return. If not set, the page length is set to a
* server configured value (10000, as of 1/5/2024). - when set to a value greater than 0, the page
@@ -56,6 +61,15 @@ public String getCatalogName() {
return catalogName;
}
+ public ListSummariesRequest setIncludeManifestCapabilities(Boolean includeManifestCapabilities) {
+ this.includeManifestCapabilities = includeManifestCapabilities;
+ return this;
+ }
+
+ public Boolean getIncludeManifestCapabilities() {
+ return includeManifestCapabilities;
+ }
+
public ListSummariesRequest setMaxResults(Long maxResults) {
this.maxResults = maxResults;
return this;
@@ -98,6 +112,7 @@ public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) return false;
ListSummariesRequest that = (ListSummariesRequest) o;
return Objects.equals(catalogName, that.catalogName)
+ && Objects.equals(includeManifestCapabilities, that.includeManifestCapabilities)
&& Objects.equals(maxResults, that.maxResults)
&& Objects.equals(pageToken, that.pageToken)
&& Objects.equals(schemaNamePattern, that.schemaNamePattern)
@@ -106,13 +121,20 @@ public boolean equals(Object o) {
@Override
public int hashCode() {
- return Objects.hash(catalogName, maxResults, pageToken, schemaNamePattern, tableNamePattern);
+ return Objects.hash(
+ catalogName,
+ includeManifestCapabilities,
+ maxResults,
+ pageToken,
+ schemaNamePattern,
+ tableNamePattern);
}
@Override
public String toString() {
return new ToStringer(ListSummariesRequest.class)
.add("catalogName", catalogName)
+ .add("includeManifestCapabilities", includeManifestCapabilities)
.add("maxResults", maxResults)
.add("pageToken", pageToken)
.add("schemaNamePattern", schemaNamePattern)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTablesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTablesRequest.java
index 638d6c8c..8a53c278 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTablesRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ListTablesRequest.java
@@ -29,6 +29,11 @@ public class ListTablesRequest {
@QueryParam("include_delta_metadata")
private Boolean includeDeltaMetadata;
+ /** Whether to include a manifest containing capabilities the table has. */
+ @JsonIgnore
+ @QueryParam("include_manifest_capabilities")
+ private Boolean includeManifestCapabilities;
+
/**
* Maximum number of tables to return. If not set, all the tables are returned (not recommended).
* - when set to a value greater than 0, the page length is the minimum of this value and a server
@@ -86,6 +91,15 @@ public Boolean getIncludeDeltaMetadata() {
return includeDeltaMetadata;
}
+ public ListTablesRequest setIncludeManifestCapabilities(Boolean includeManifestCapabilities) {
+ this.includeManifestCapabilities = includeManifestCapabilities;
+ return this;
+ }
+
+ public Boolean getIncludeManifestCapabilities() {
+ return includeManifestCapabilities;
+ }
+
public ListTablesRequest setMaxResults(Long maxResults) {
this.maxResults = maxResults;
return this;
@@ -139,6 +153,7 @@ public boolean equals(Object o) {
return Objects.equals(catalogName, that.catalogName)
&& Objects.equals(includeBrowse, that.includeBrowse)
&& Objects.equals(includeDeltaMetadata, that.includeDeltaMetadata)
+ && Objects.equals(includeManifestCapabilities, that.includeManifestCapabilities)
&& Objects.equals(maxResults, that.maxResults)
&& Objects.equals(omitColumns, that.omitColumns)
&& Objects.equals(omitProperties, that.omitProperties)
@@ -152,6 +167,7 @@ public int hashCode() {
catalogName,
includeBrowse,
includeDeltaMetadata,
+ includeManifestCapabilities,
maxResults,
omitColumns,
omitProperties,
@@ -165,6 +181,7 @@ public String toString() {
.add("catalogName", catalogName)
.add("includeBrowse", includeBrowse)
.add("includeDeltaMetadata", includeDeltaMetadata)
+ .add("includeManifestCapabilities", includeManifestCapabilities)
.add("maxResults", maxResults)
.add("omitColumns", omitColumns)
.add("omitProperties", omitProperties)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoreInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoreInfo.java
index e98926c3..2eef53df 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoreInfo.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MetastoreInfo.java
@@ -40,6 +40,10 @@ public class MetastoreInfo {
@JsonProperty("delta_sharing_scope")
private MetastoreInfoDeltaSharingScope deltaSharingScope;
+ /** Whether to allow non-DBR clients to directly access entities under the metastore. */
+ @JsonProperty("external_access_enabled")
+ private Boolean externalAccessEnabled;
+
/**
* Globally unique metastore ID across clouds and regions, of the form
* `cloud:region:metastore_id`.
@@ -151,6 +155,15 @@ public MetastoreInfoDeltaSharingScope getDeltaSharingScope() {
return deltaSharingScope;
}
+ public MetastoreInfo setExternalAccessEnabled(Boolean externalAccessEnabled) {
+ this.externalAccessEnabled = externalAccessEnabled;
+ return this;
+ }
+
+ public Boolean getExternalAccessEnabled() {
+ return externalAccessEnabled;
+ }
+
public MetastoreInfo setGlobalMetastoreId(String globalMetastoreId) {
this.globalMetastoreId = globalMetastoreId;
return this;
@@ -264,6 +277,7 @@ public boolean equals(Object o) {
deltaSharingRecipientTokenLifetimeInSeconds,
that.deltaSharingRecipientTokenLifetimeInSeconds)
&& Objects.equals(deltaSharingScope, that.deltaSharingScope)
+ && Objects.equals(externalAccessEnabled, that.externalAccessEnabled)
&& Objects.equals(globalMetastoreId, that.globalMetastoreId)
&& Objects.equals(metastoreId, that.metastoreId)
&& Objects.equals(name, that.name)
@@ -287,6 +301,7 @@ public int hashCode() {
deltaSharingOrganizationName,
deltaSharingRecipientTokenLifetimeInSeconds,
deltaSharingScope,
+ externalAccessEnabled,
globalMetastoreId,
metastoreId,
name,
@@ -312,6 +327,7 @@ public String toString() {
"deltaSharingRecipientTokenLifetimeInSeconds",
deltaSharingRecipientTokenLifetimeInSeconds)
.add("deltaSharingScope", deltaSharingScope)
+ .add("externalAccessEnabled", externalAccessEnabled)
.add("globalMetastoreId", globalMetastoreId)
.add("metastoreId", metastoreId)
.add("name", name)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/R2Credentials.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/R2Credentials.java
new file mode 100755
index 00000000..5a157c34
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/R2Credentials.java
@@ -0,0 +1,78 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/**
+ * R2 temporary credentials for API authentication. Read more at
+ * https://developers.cloudflare.com/r2/api/s3/tokens/.
+ */
+@Generated
+public class R2Credentials {
+ /** The access key ID that identifies the temporary credentials. */
+ @JsonProperty("access_key_id")
+ private String accessKeyId;
+
+ /** The secret access key associated with the access key. */
+ @JsonProperty("secret_access_key")
+ private String secretAccessKey;
+
+ /** The generated JWT that users must pass to use the temporary credentials. */
+ @JsonProperty("session_token")
+ private String sessionToken;
+
+ public R2Credentials setAccessKeyId(String accessKeyId) {
+ this.accessKeyId = accessKeyId;
+ return this;
+ }
+
+ public String getAccessKeyId() {
+ return accessKeyId;
+ }
+
+ public R2Credentials setSecretAccessKey(String secretAccessKey) {
+ this.secretAccessKey = secretAccessKey;
+ return this;
+ }
+
+ public String getSecretAccessKey() {
+ return secretAccessKey;
+ }
+
+ public R2Credentials setSessionToken(String sessionToken) {
+ this.sessionToken = sessionToken;
+ return this;
+ }
+
+ public String getSessionToken() {
+ return sessionToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ R2Credentials that = (R2Credentials) o;
+ return Objects.equals(accessKeyId, that.accessKeyId)
+ && Objects.equals(secretAccessKey, that.secretAccessKey)
+ && Objects.equals(sessionToken, that.sessionToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(accessKeyId, secretAccessKey, sessionToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(R2Credentials.class)
+ .add("accessKeyId", accessKeyId)
+ .add("secretAccessKey", secretAccessKey)
+ .add("sessionToken", sessionToken)
+ .toString();
+ }
+}
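
These fields map directly onto S3-style session credentials; a hedged sketch of wiring them into an S3-compatible client for R2 with the AWS SDK v2 (the endpoint and account ID are placeholders, and this wiring is not part of the change):

    import java.net.URI;
    import com.databricks.sdk.service.catalog.R2Credentials;
    import software.amazon.awssdk.auth.credentials.AwsSessionCredentials;
    import software.amazon.awssdk.auth.credentials.StaticCredentialsProvider;
    import software.amazon.awssdk.regions.Region;
    import software.amazon.awssdk.services.s3.S3Client;

    // creds would come from GenerateTemporaryTableCredentialResponse.getR2TempCredentials().
    R2Credentials creds = /* obtained from the temporary credentials API */ null;
    S3Client s3 =
        S3Client.builder()
            .endpointOverride(URI.create("https://<account-id>.r2.cloudflarestorage.com")) // placeholder
            .region(Region.of("auto")) // R2 accepts the "auto" region
            .credentialsProvider(
                StaticCredentialsProvider.create(
                    AwsSessionCredentials.create(
                        creds.getAccessKeyId(), creds.getSecretAccessKey(), creds.getSessionToken())))
            .build();
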
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QuerySourceTrigger.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableOperation.java
similarity index 59%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QuerySourceTrigger.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableOperation.java
index 2fae09bb..6122026d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QuerySourceTrigger.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TableOperation.java
@@ -1,11 +1,11 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-package com.databricks.sdk.service.sql;
+package com.databricks.sdk.service.catalog;
import com.databricks.sdk.support.Generated;
@Generated
-public enum QuerySourceTrigger {
- MANUAL,
- SCHEDULED,
+public enum TableOperation {
+ READ,
+ READ_WRITE,
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsAPI.java
new file mode 100755
index 00000000..dd496a46
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsAPI.java
@@ -0,0 +1,56 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.support.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Temporary Table Credentials refer to short-lived, downscoped credentials used to access cloud
+ * storage locations where table data is stored in Databricks. These credentials are employed to
+ * provide secure and time-limited access to data in cloud environments such as AWS, Azure, and
+ * Google Cloud. Each cloud provider has its own type of credentials: AWS uses temporary session
+ * tokens via AWS Security Token Service (STS), Azure utilizes Shared Access Signatures (SAS) for
+ * its data storage services, and Google Cloud supports temporary credentials through OAuth 2.0.
+ * Temporary table credentials ensure that data access is limited in scope and duration, reducing
+ * the risk of unauthorized access or misuse. To use the temporary table credentials API, a
+ * metastore admin needs to enable the external_access_enabled flag (off by default) at the
+ * metastore level, and the user needs to be granted the EXTERNAL USE SCHEMA permission at the
+ * schema level by the catalog admin. Note that EXTERNAL USE SCHEMA is a schema-level permission
+ * that can only be granted by the catalog admin explicitly and is not included in schema
+ * ownership or ALL PRIVILEGES on the schema, for security reasons.
+ */
+@Generated
+public class TemporaryTableCredentialsAPI {
+ private static final Logger LOG = LoggerFactory.getLogger(TemporaryTableCredentialsAPI.class);
+
+ private final TemporaryTableCredentialsService impl;
+
+ /** Regular-use constructor */
+ public TemporaryTableCredentialsAPI(ApiClient apiClient) {
+ impl = new TemporaryTableCredentialsImpl(apiClient);
+ }
+
+ /** Constructor for mocks */
+ public TemporaryTableCredentialsAPI(TemporaryTableCredentialsService mock) {
+ impl = mock;
+ }
+
+ /**
+ * Generate a temporary table credential.
+ *
+ * Get a short-lived credential for directly accessing the table data on cloud storage. The
+ * metastore must have the external_access_enabled flag set to true (default false). The caller
+ * must have the EXTERNAL_USE_SCHEMA privilege on the parent schema, and this privilege can only
+ * be granted by catalog owners.
+ */
+ public GenerateTemporaryTableCredentialResponse generateTemporaryTableCredentials(
+ GenerateTemporaryTableCredentialRequest request) {
+ return impl.generateTemporaryTableCredentials(request);
+ }
+
+ public TemporaryTableCredentialsService impl() {
+ return impl;
+ }
+}
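
A usage sketch of the new API; the WorkspaceClient accessor name and the request setters (table ID, operation) are assumptions, since the request type is not visible in this diff:

    import com.databricks.sdk.WorkspaceClient;
    import com.databricks.sdk.service.catalog.GenerateTemporaryTableCredentialRequest;
    import com.databricks.sdk.service.catalog.GenerateTemporaryTableCredentialResponse;
    import com.databricks.sdk.service.catalog.TableOperation;

    WorkspaceClient w = new WorkspaceClient();
    GenerateTemporaryTableCredentialResponse creds =
        w.temporaryTableCredentials() // accessor name assumed
            .generateTemporaryTableCredentials(
                new GenerateTemporaryTableCredentialRequest()
                    .setTableId("<table-uuid>") // hypothetical; setter assumed
                    .setOperation(TableOperation.READ)); // enum added in this change
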
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsImpl.java
new file mode 100755
index 00000000..56247697
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsImpl.java
@@ -0,0 +1,27 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.support.Generated;
+import java.util.HashMap;
+import java.util.Map;
+
+/** Package-local implementation of TemporaryTableCredentials */
+@Generated
+class TemporaryTableCredentialsImpl implements TemporaryTableCredentialsService {
+ private final ApiClient apiClient;
+
+ public TemporaryTableCredentialsImpl(ApiClient apiClient) {
+ this.apiClient = apiClient;
+ }
+
+ @Override
+ public GenerateTemporaryTableCredentialResponse generateTemporaryTableCredentials(
+ GenerateTemporaryTableCredentialRequest request) {
+ String path = "/api/2.0/unity-catalog/temporary-table-credentials";
+ Map<String, String> headers = new HashMap<>();
+ headers.put("Accept", "application/json");
+ headers.put("Content-Type", "application/json");
+ return apiClient.POST(path, request, GenerateTemporaryTableCredentialResponse.class, headers);
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsService.java
new file mode 100755
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsService.java
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+
+/**
+ * This is the high-level interface, that contains generated methods.
+ *
+ * Evolving: this interface is under development. Method signatures may change.
+ */
+@Generated
+public interface TemporaryTableCredentialsService {
+ /**
+ * Generate a temporary table credential.
+ *
+ * Get a short-lived credential for directly accessing the table data on cloud storage. The
+ * metastore must have the external_access_enabled flag set to true (default false). The caller
+ * must have the EXTERNAL_USE_SCHEMA privilege on the parent schema, and this privilege can only
+ * be granted by catalog owners.
+ */
+ GenerateTemporaryTableCredentialResponse generateTemporaryTableCredentials(
+ GenerateTemporaryTableCredentialRequest generateTemporaryTableCredentialRequest);
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAttributes.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAttributes.java
index f4e79efd..3787fbb8 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAttributes.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterAttributes.java
@@ -150,8 +150,13 @@ public class ClusterAttributes {
private String policyId;
/**
- * Decides which runtime engine to be use, e.g. Standard vs. Photon. If unspecified, the runtime
- * engine is inferred from spark_version.
+ * Determines the cluster's runtime engine, either standard or Photon.
+ *
+ * This field is not compatible with legacy `spark_version` values that contain `-photon-`.
+ * Remove `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`.
+ *
+ * If left unspecified, the runtime engine defaults to standard unless the spark_version
+ * contains -photon-, in which case Photon will be used.
*/
@JsonProperty("runtime_engine")
private RuntimeEngine runtimeEngine;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterDetails.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterDetails.java
index e1c192ea..ab02e71d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterDetails.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterDetails.java
@@ -249,8 +249,13 @@ public class ClusterDetails {
private String policyId;
/**
- * Decides which runtime engine to be use, e.g. Standard vs. Photon. If unspecified, the runtime
- * engine is inferred from spark_version.
+ * Determines the cluster's runtime engine, either standard or Photon.
+ *
+ * This field is not compatible with legacy `spark_version` values that contain `-photon-`.
+ * Remove `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`.
+ *
+ * If left unspecified, the runtime engine defaults to standard unless the spark_version
+ * contains -photon-, in which case Photon will be used.
*/
@JsonProperty("runtime_engine")
private RuntimeEngine runtimeEngine;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSpec.java
index 0236f53c..f5d7f925 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSpec.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClusterSpec.java
@@ -177,8 +177,13 @@ public class ClusterSpec {
private String policyId;
/**
- * Decides which runtime engine to be use, e.g. Standard vs. Photon. If unspecified, the runtime
- * engine is inferred from spark_version.
+ * Determines the cluster's runtime engine, either standard or Photon.
+ *
+ * This field is not compatible with legacy `spark_version` values that contain `-photon-`.
+ * Remove `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`.
+ *
+ * If left unspecified, the runtime engine defaults to standard unless the spark_version
+ * contains -photon-, in which case Photon will be used.
*/
@JsonProperty("runtime_engine")
private RuntimeEngine runtimeEngine;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersAPI.java
index 6cc810fc..2fe2801c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersAPI.java
@@ -170,6 +170,11 @@
 * If Databricks acquires at least 85% of the requested on-demand nodes, cluster creation will
* succeed. Otherwise the cluster will terminate with an informative error message.
+ *
+ * Rather than authoring the cluster's JSON definition from scratch, Databricks recommends
+ * filling out the [create compute UI] and then copying the generated JSON definition from the UI.
+ *
+ * [create compute UI]: https://docs.databricks.com/compute/configure.html
*/
public Wait<ClusterDetails, CreateClusterResponse> create(CreateCluster createCluster) {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ClustersService.java
 * If Databricks acquires at least 85% of the requested on-demand nodes, cluster creation will
* succeed. Otherwise the cluster will terminate with an informative error message.
+ *
+ * Rather than authoring the cluster's JSON definition from scratch, Databricks recommends
+ * filling out the [create compute UI] and then copying the generated JSON definition from the UI.
+ *
+ * [create compute UI]: https://docs.databricks.com/compute/configure.html
*/
CreateClusterResponse create(CreateCluster createCluster);
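
The reworded runtime_engine docs above imply the following creation pattern; a sketch, assuming standard CreateCluster setters (the cluster name, spark version, node type, and size are illustrative):

    import com.databricks.sdk.WorkspaceClient;
    import com.databricks.sdk.service.compute.CreateCluster;
    import com.databricks.sdk.service.compute.RuntimeEngine;

    WorkspaceClient w = new WorkspaceClient();
    w.clusters()
        .create(
            new CreateCluster()
                .setClusterName("photon-example") // hypothetical
                // Per the updated docs: no "-photon-" in spark_version; select Photon explicitly.
                .setSparkVersion("15.4.x-scala2.12")
                .setNodeTypeId("i3.xlarge")
                .setNumWorkers(2L)
                .setRuntimeEngine(RuntimeEngine.PHOTON))
        .get(); // blocks until the cluster reaches RUNNING
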
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateCluster.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateCluster.java
index 79eca90e..469137cb 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateCluster.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateCluster.java
@@ -184,8 +184,13 @@ public class CreateCluster {
private String policyId;
/**
- * Decides which runtime engine to be use, e.g. Standard vs. Photon. If unspecified, the runtime
- * engine is inferred from spark_version.
+ * Determines the cluster's runtime engine, either standard or Photon.
+ *
+ * This field is not compatible with legacy `spark_version` values that contain `-photon-`.
+ * Remove `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`.
+ *
+ * If left unspecified, the runtime engine defaults to standard unless the spark_version
+ * contains -photon-, in which case Photon will be used.
*/
@JsonProperty("runtime_engine")
private RuntimeEngine runtimeEngine;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java
index d1976a22..8fb986e4 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditCluster.java
@@ -181,8 +181,13 @@ public class EditCluster {
private String policyId;
/**
- * Decides which runtime engine to be use, e.g. Standard vs. Photon. If unspecified, the runtime
- * engine is inferred from spark_version.
+ * Determines the cluster's runtime engine, either standard or Photon.
+ *
+ * This field is not compatible with legacy `spark_version` values that contain `-photon-`.
+ * Remove `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`.
+ *
+ * If left unspecified, the runtime engine defaults to standard unless the spark_version
+ * contains -photon-, in which case Photon will be used.
*/
@JsonProperty("runtime_engine")
private RuntimeEngine runtimeEngine;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RuntimeEngine.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RuntimeEngine.java
index a119bb37..31d19572 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RuntimeEngine.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RuntimeEngine.java
@@ -5,8 +5,13 @@
import com.databricks.sdk.support.Generated;
/**
- * Decides which runtime engine to be use, e.g. Standard vs. Photon. If unspecified, the runtime
- * engine is inferred from spark_version.
+ * Determines the cluster's runtime engine, either standard or Photon.
+ *
+ * This field is not compatible with legacy `spark_version` values that contain `-photon-`.
+ * Remove `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`.
+ *
+ * If left unspecified, the runtime engine defaults to standard unless the spark_version contains
+ * -photon-, in which case Photon will be used.
*/
@Generated
public enum RuntimeEngine {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResource.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResource.java
index 3f2e0ebe..8f9d8201 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResource.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResource.java
@@ -170,8 +170,13 @@ public class UpdateClusterResource {
private String policyId;
/**
- * Decides which runtime engine to be use, e.g. Standard vs. Photon. If unspecified, the runtime
- * engine is inferred from spark_version.
+ * Determines the cluster's runtime engine, either standard or Photon.
+ *
+ * This field is not compatible with legacy `spark_version` values that contain `-photon-`.
+ * Remove `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`.
+ *
+ * If left unspecified, the runtime engine defaults to standard unless the spark_version
+ * contains -photon-, in which case Photon will be used.
*/
@JsonProperty("runtime_engine")
private RuntimeEngine runtimeEngine;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java
index 9fda1f3c..040a695d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/CreateJob.java
@@ -49,8 +49,11 @@ public class CreateJob {
private JobEmailNotifications emailNotifications;
/**
- * A list of task execution environment specifications that can be referenced by tasks of this
- * job.
+ * A list of task execution environment specifications that can be referenced by serverless tasks
+ * of this job. An environment is required to be present for serverless tasks. For serverless
+ * notebook tasks, the environment is accessible in the notebook environment panel. For other
+ * serverless tasks, the task environment is required to be specified using environment_key in the
+ * task settings.
*/
@JsonProperty("environments")
private Collection<JobEnvironment> environments;

- * Only `user_name` or `service_principal_name` can be specified. If both are specified, an
- * error is thrown.
+ * Exactly one of `user_name`, `service_principal_name`, or `group_name` should be specified. If
+ * not, an error is thrown.
*/
@JsonProperty("run_as")
private JobRunAs runAs;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobRunAs.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobRunAs.java
index 92758fc9..adab90dc 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobRunAs.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobRunAs.java
@@ -8,12 +8,11 @@
import java.util.Objects;
/**
- * Write-only setting, available only in Create/Update/Reset and Submit calls. Specifies the user or
- * service principal that the job runs as. If not specified, the job runs as the user who created
- * the job.
+ * Write-only setting. Specifies the user, service principal, or group that the job/pipeline runs
+ * as.
+ * If not specified, the job/pipeline runs as the user who created the job/pipeline.
*
- * Only `user_name` or `service_principal_name` can be specified. If both are specified, an error
- * is thrown.
+ * Exactly one of `user_name`, `service_principal_name`, or `group_name` should be specified. If
+ * not, an error is thrown.
*/
@Generated
public class JobRunAs {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java
index 02d2f995..a50318ab 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/JobSettings.java
@@ -45,8 +45,11 @@ public class JobSettings {
private JobEmailNotifications emailNotifications;
/**
- * A list of task execution environment specifications that can be referenced by tasks of this
- * job.
+ * A list of task execution environment specifications that can be referenced by serverless tasks
+ * of this job. An environment is required to be present for serverless tasks. For serverless
+ * notebook tasks, the environment is accessible in the notebook environment panel. For other
+ * serverless tasks, the task environment is required to be specified using environment_key in the
+ * task settings.
*/
@JsonProperty("environments")
private Collection<JobEnvironment> environments;

- * Only `user_name` or `service_principal_name` can be specified. If both are specified, an
- * error is thrown.
+ * Exactly one of `user_name`, `service_principal_name`, or `group_name` should be specified. If
+ * not, an error is thrown.
*/
@JsonProperty("run_as")
private JobRunAs runAs;
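
A sketch of the run_as contract described above, setting exactly one principal field (the job name is illustrative; a group_name setter would presumably follow the same pattern once exposed):

    import com.databricks.sdk.service.jobs.JobRunAs;
    import com.databricks.sdk.service.jobs.JobSettings;

    // Exactly one of the three principal fields may be set; setting none, or more
    // than one, is rejected by the API per the updated docs.
    JobSettings settings =
        new JobSettings()
            .setName("nightly-etl") // hypothetical
            .setRunAs(new JobRunAs().setServicePrincipalName("<application-id>"));
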
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunResultState.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunResultState.java
index 68bc74b4..8a203f13 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunResultState.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/RunResultState.java
@@ -12,11 +12,13 @@
* reached. * `EXCLUDED`: The run was skipped because the necessary conditions were not met. *
* `SUCCESS_WITH_FAILURES`: The job run completed successfully with some failures; leaf tasks were
* successful. * `UPSTREAM_FAILED`: The run was skipped because of an upstream failure. *
- * `UPSTREAM_CANCELED`: The run was skipped because an upstream task was canceled.
+ * `UPSTREAM_CANCELED`: The run was skipped because an upstream task was canceled. * `DISABLED`: The
+ * run was skipped because it was disabled explicitly by the user.
*/
@Generated
public enum RunResultState {
CANCELED, // The run was canceled at user request.
+ DISABLED, // The run was skipped because it was disabled explicitly by the user.
EXCLUDED, // The run was skipped because the necessary conditions were not met.
FAILED, // The task completed with an error.
MAXIMUM_CONCURRENT_RUNS_REACHED, // The run was skipped because the maximum concurrent runs were reached.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java
index f6a6aefb..cd5180a1 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/CreatePipeline.java
@@ -15,6 +15,10 @@ public class CreatePipeline {
@JsonProperty("allow_duplicate_names")
private Boolean allowDuplicateNames;
+ /** Budget policy of this pipeline. */
+ @JsonProperty("budget_policy_id")
+ private String budgetPolicyId;
+
/**
* A catalog in Unity Catalog to publish data from this pipeline to. If `target` is specified,
* tables in this pipeline are published to a `target` schema inside `catalog` (for example,
@@ -120,6 +124,15 @@ public Boolean getAllowDuplicateNames() {
return allowDuplicateNames;
}
+ public CreatePipeline setBudgetPolicyId(String budgetPolicyId) {
+ this.budgetPolicyId = budgetPolicyId;
+ return this;
+ }
+
+ public String getBudgetPolicyId() {
+ return budgetPolicyId;
+ }
+
public CreatePipeline setCatalog(String catalog) {
this.catalog = catalog;
return this;
@@ -315,6 +328,7 @@ public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) return false;
CreatePipeline that = (CreatePipeline) o;
return Objects.equals(allowDuplicateNames, that.allowDuplicateNames)
+ && Objects.equals(budgetPolicyId, that.budgetPolicyId)
&& Objects.equals(catalog, that.catalog)
&& Objects.equals(channel, that.channel)
&& Objects.equals(clusters, that.clusters)
@@ -342,6 +356,7 @@ public boolean equals(Object o) {
public int hashCode() {
return Objects.hash(
allowDuplicateNames,
+ budgetPolicyId,
catalog,
channel,
clusters,
@@ -369,6 +384,7 @@ public int hashCode() {
public String toString() {
return new ToStringer(CreatePipeline.class)
.add("allowDuplicateNames", allowDuplicateNames)
+ .add("budgetPolicyId", budgetPolicyId)
.add("catalog", catalog)
.add("channel", channel)
.add("clusters", clusters)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java
index e1f7c34b..0ea12b58 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/EditPipeline.java
@@ -17,6 +17,10 @@ public class EditPipeline {
@JsonProperty("allow_duplicate_names")
private Boolean allowDuplicateNames;
+ /** Budget policy of this pipeline. */
+ @JsonProperty("budget_policy_id")
+ private String budgetPolicyId;
+
/**
* A catalog in Unity Catalog to publish data from this pipeline to. If `target` is specified,
* tables in this pipeline are published to a `target` schema inside `catalog` (for example,
@@ -129,6 +133,15 @@ public Boolean getAllowDuplicateNames() {
return allowDuplicateNames;
}
+ public EditPipeline setBudgetPolicyId(String budgetPolicyId) {
+ this.budgetPolicyId = budgetPolicyId;
+ return this;
+ }
+
+ public String getBudgetPolicyId() {
+ return budgetPolicyId;
+ }
+
public EditPipeline setCatalog(String catalog) {
this.catalog = catalog;
return this;
@@ -333,6 +346,7 @@ public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) return false;
EditPipeline that = (EditPipeline) o;
return Objects.equals(allowDuplicateNames, that.allowDuplicateNames)
+ && Objects.equals(budgetPolicyId, that.budgetPolicyId)
&& Objects.equals(catalog, that.catalog)
&& Objects.equals(channel, that.channel)
&& Objects.equals(clusters, that.clusters)
@@ -361,6 +375,7 @@ public boolean equals(Object o) {
public int hashCode() {
return Objects.hash(
allowDuplicateNames,
+ budgetPolicyId,
catalog,
channel,
clusters,
@@ -389,6 +404,7 @@ public int hashCode() {
public String toString() {
return new ToStringer(EditPipeline.class)
.add("allowDuplicateNames", allowDuplicateNames)
+ .add("budgetPolicyId", budgetPolicyId)
.add("catalog", catalog)
.add("channel", channel)
.add("clusters", clusters)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelineResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelineResponse.java
index e7bf19d2..0654879e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelineResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/GetPipelineResponse.java
@@ -22,6 +22,10 @@ public class GetPipelineResponse {
@JsonProperty("creator_user_name")
private String creatorUserName;
+ /** Serverless budget policy ID of this pipeline. */
+ @JsonProperty("effective_budget_policy_id")
+ private String effectiveBudgetPolicyId;
+
/** The health of a pipeline. */
@JsonProperty("health")
private GetPipelineResponseHealth health;
@@ -81,6 +85,15 @@ public String getCreatorUserName() {
return creatorUserName;
}
+ public GetPipelineResponse setEffectiveBudgetPolicyId(String effectiveBudgetPolicyId) {
+ this.effectiveBudgetPolicyId = effectiveBudgetPolicyId;
+ return this;
+ }
+
+ public String getEffectiveBudgetPolicyId() {
+ return effectiveBudgetPolicyId;
+ }
+
public GetPipelineResponse setHealth(GetPipelineResponseHealth health) {
this.health = health;
return this;
@@ -161,6 +174,7 @@ public boolean equals(Object o) {
return Objects.equals(cause, that.cause)
&& Objects.equals(clusterId, that.clusterId)
&& Objects.equals(creatorUserName, that.creatorUserName)
+ && Objects.equals(effectiveBudgetPolicyId, that.effectiveBudgetPolicyId)
&& Objects.equals(health, that.health)
&& Objects.equals(lastModified, that.lastModified)
&& Objects.equals(latestUpdates, that.latestUpdates)
@@ -177,6 +191,7 @@ public int hashCode() {
cause,
clusterId,
creatorUserName,
+ effectiveBudgetPolicyId,
health,
lastModified,
latestUpdates,
@@ -193,6 +208,7 @@ public String toString() {
.add("cause", cause)
.add("clusterId", clusterId)
.add("creatorUserName", creatorUserName)
+ .add("effectiveBudgetPolicyId", effectiveBudgetPolicyId)
.add("health", health)
.add("lastModified", lastModified)
.add("latestUpdates", latestUpdates)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineSpec.java
index 8b796bdb..8561d258 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineSpec.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelineSpec.java
@@ -11,6 +11,10 @@
@Generated
public class PipelineSpec {
+ /** Budget policy of this pipeline. */
+ @JsonProperty("budget_policy_id")
+ private String budgetPolicyId;
+
/**
* A catalog in Unity Catalog to publish data from this pipeline to. If `target` is specified,
* tables in this pipeline are published to a `target` schema inside `catalog` (for example,
@@ -103,6 +107,15 @@ public class PipelineSpec {
@JsonProperty("trigger")
private PipelineTrigger trigger;
+ public PipelineSpec setBudgetPolicyId(String budgetPolicyId) {
+ this.budgetPolicyId = budgetPolicyId;
+ return this;
+ }
+
+ public String getBudgetPolicyId() {
+ return budgetPolicyId;
+ }
+
public PipelineSpec setCatalog(String catalog) {
this.catalog = catalog;
return this;
@@ -288,7 +301,8 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
PipelineSpec that = (PipelineSpec) o;
- return Objects.equals(catalog, that.catalog)
+ return Objects.equals(budgetPolicyId, that.budgetPolicyId)
+ && Objects.equals(catalog, that.catalog)
&& Objects.equals(channel, that.channel)
&& Objects.equals(clusters, that.clusters)
&& Objects.equals(configuration, that.configuration)
@@ -313,6 +327,7 @@ public boolean equals(Object o) {
@Override
public int hashCode() {
return Objects.hash(
+ budgetPolicyId,
catalog,
channel,
clusters,
@@ -338,6 +353,7 @@ public int hashCode() {
@Override
public String toString() {
return new ToStringer(PipelineSpec.class)
+ .add("budgetPolicyId", budgetPolicyId)
.add("catalog", catalog)
.add("channel", channel)
.add("clusters", clusters)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayConfig.java
new file mode 100755
index 00000000..9267fb17
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayConfig.java
@@ -0,0 +1,100 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.serving;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class AiGatewayConfig {
+ /**
+ * Configuration for AI Guardrails to prevent unwanted data and unsafe data in requests and
+ * responses.
+ */
+ @JsonProperty("guardrails")
+ private AiGatewayGuardrails guardrails;
+
+ /**
+ * Configuration for payload logging using inference tables. Use these tables to monitor and audit
+ * data being sent to and received from model APIs and to improve model quality.
+ */
+ @JsonProperty("inference_table_config")
+ private AiGatewayInferenceTableConfig inferenceTableConfig;
+
+ /** Configuration for rate limits which can be set to limit endpoint traffic. */
+ @JsonProperty("rate_limits")
+ private Collection<AiGatewayRateLimit> rateLimits;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsAPI.java
- * Used to update the rate limits of a serving endpoint. NOTE: only external and foundation
- * model endpoints are supported as of now.
+ * Used to update the rate limits of a serving endpoint. NOTE: Only foundation model endpoints
+ * are currently supported. For external models, use AI Gateway to manage rate limits.
*/
public PutResponse put(PutRequest request) {
return impl.put(request);
}
+ public PutAiGatewayResponse putAiGateway(String name) {
+ return putAiGateway(new PutAiGatewayRequest().setName(name));
+ }
+
+ /**
+ * Update AI Gateway of a serving endpoint.
+ *
+ * Used to update the AI Gateway of a serving endpoint. NOTE: Only external model endpoints are
+ * currently supported.
+ */
+ public PutAiGatewayResponse putAiGateway(PutAiGatewayRequest request) {
+ return impl.putAiGateway(request);
+ }
+
public QueryEndpointResponse query(String name) {
return query(new QueryEndpointInput().setName(name));
}
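
A usage sketch of the new endpoint method; only setName is visible in this diff, so attaching guardrails, inference-table, or rate-limit configs through further setters is an assumption:

    import com.databricks.sdk.WorkspaceClient;
    import com.databricks.sdk.service.serving.PutAiGatewayRequest;
    import com.databricks.sdk.service.serving.PutAiGatewayResponse;

    WorkspaceClient w = new WorkspaceClient();
    PutAiGatewayResponse resp =
        w.servingEndpoints()
            .putAiGateway(
                new PutAiGatewayRequest()
                    .setName("my-external-model-endpoint")); // hypothetical endpoint name
    // Guardrails, inference tables, and rate limits would be configured through the
    // corresponding AiGateway* objects on the request (setters assumed, not shown here).
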
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsImpl.java
index 2ca707ad..5e518c6f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsImpl.java
@@ -126,6 +126,15 @@ public PutResponse put(PutRequest request) {
return apiClient.PUT(path, request, PutResponse.class, headers);
}
+ @Override
+ public PutAiGatewayResponse putAiGateway(PutAiGatewayRequest request) {
+ String path = String.format("/api/2.0/serving-endpoints/%s/ai-gateway", request.getName());
+ Map<String, String> headers = new HashMap<>();
+ headers.put("Accept", "application/json");
+ headers.put("Content-Type", "application/json");
+ return apiClient.PUT(path, request, PutAiGatewayResponse.class, headers);
+ }
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/ServingEndpointsService.java
- * Used to update the rate limits of a serving endpoint. NOTE: only external and foundation
- * model endpoints are supported as of now.
+ * Used to update the rate limits of a serving endpoint. NOTE: Only foundation model endpoints
+ * are currently supported. For external models, use AI Gateway to manage rate limits.
*/
PutResponse put(PutRequest putRequest);
+ /**
+ * Update AI Gateway of a serving endpoint.
+ *
+ * Used to update the AI Gateway of a serving endpoint. NOTE: Only external model endpoints are
+ * currently supported.
+ */
+ PutAiGatewayResponse putAiGateway(PutAiGatewayRequest putAiGatewayRequest);
+
/** Query a serving endpoint. */
QueryEndpointResponse query(QueryEndpointInput queryEndpointInput);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountSettingsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountSettingsAPI.java
index cb24708e..ff93f6cf 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountSettingsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/AccountSettingsAPI.java
@@ -15,6 +15,8 @@ public class AccountSettingsAPI {
private CspEnablementAccountAPI cspEnablementAccountAPI;
+ private DisableLegacyFeaturesAPI disableLegacyFeaturesAPI;
+
private EsmEnablementAccountAPI esmEnablementAccountAPI;
private PersonalComputeAPI personalComputeAPI;
@@ -25,6 +27,8 @@ public AccountSettingsAPI(ApiClient apiClient) {
cspEnablementAccountAPI = new CspEnablementAccountAPI(apiClient);
+ disableLegacyFeaturesAPI = new DisableLegacyFeaturesAPI(apiClient);
+
esmEnablementAccountAPI = new EsmEnablementAccountAPI(apiClient);
personalComputeAPI = new PersonalComputeAPI(apiClient);
@@ -43,6 +47,11 @@ public CspEnablementAccountAPI CspEnablementAccount() {
return cspEnablementAccountAPI;
}
+ /** Disable legacy features for new Databricks workspaces. */
+ public DisableLegacyFeaturesAPI DisableLegacyFeatures() {
+ return disableLegacyFeaturesAPI;
+ }
+
/**
* The enhanced security monitoring setting at the account level controls whether to enable the
* feature on new workspaces.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ServerlessChannelInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/BooleanMessage.java
similarity index 51%
rename from databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ServerlessChannelInfo.java
rename to databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/BooleanMessage.java
index 8b2d29cd..6eed151d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ServerlessChannelInfo.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/BooleanMessage.java
@@ -1,6 +1,6 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-package com.databricks.sdk.service.sql;
+package com.databricks.sdk.service.settings;
import com.databricks.sdk.support.Generated;
import com.databricks.sdk.support.ToStringer;
@@ -8,35 +8,35 @@
import java.util.Objects;
@Generated
-public class ServerlessChannelInfo {
- /** Name of the Channel */
- @JsonProperty("name")
- private ChannelName name;
+public class BooleanMessage {
+ /** */
+ @JsonProperty("value")
+ private Boolean value;
- public ServerlessChannelInfo setName(ChannelName name) {
- this.name = name;
+ public BooleanMessage setValue(Boolean value) {
+ this.value = value;
return this;
}
- public ChannelName getName() {
- return name;
+ public Boolean getValue() {
+ return value;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
- ServerlessChannelInfo that = (ServerlessChannelInfo) o;
- return Objects.equals(name, that.name);
+ BooleanMessage that = (BooleanMessage) o;
+ return Objects.equals(value, that.value);
}
@Override
public int hashCode() {
- return Objects.hash(name);
+ return Objects.hash(value);
}
@Override
public String toString() {
- return new ToStringer(ServerlessChannelInfo.class).add("name", name).toString();
+ return new ToStringer(BooleanMessage.class).add("value", value).toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyAccessRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyAccessRequest.java
new file mode 100755
index 00000000..c8824e5e
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyAccessRequest.java
@@ -0,0 +1,52 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+/** Delete Legacy Access Disablement Status */
+@Generated
+public class DeleteDisableLegacyAccessRequest {
+ /**
+ * etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+ * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+ * overwriting each other. It is strongly suggested that systems make use of the etag in the read
+ * -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get
+ * an etag from a GET request, and pass it with the DELETE request to identify the rule set
+ * version you are deleting.
+ */
+ @JsonIgnore
+ @QueryParam("etag")
+ private String etag;
+
+ public DeleteDisableLegacyAccessRequest setEtag(String etag) {
+ this.etag = etag;
+ return this;
+ }
+
+ public String getEtag() {
+ return etag;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeleteDisableLegacyAccessRequest that = (DeleteDisableLegacyAccessRequest) o;
+ return Objects.equals(etag, that.etag);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(etag);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteDisableLegacyAccessRequest.class).add("etag", etag).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyAccessResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyAccessResponse.java
new file mode 100755
index 00000000..ad22d588
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyAccessResponse.java
@@ -0,0 +1,50 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** The etag is returned. */
+@Generated
+public class DeleteDisableLegacyAccessResponse {
+ /**
+ * etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+ * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+ * overwriting each other. It is strongly suggested that systems make use of the etag in the read
+ * -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get
+ * an etag from a GET request, and pass it with the DELETE request to identify the rule set
+ * version you are deleting.
+ */
+ @JsonProperty("etag")
+ private String etag;
+
+ public DeleteDisableLegacyAccessResponse setEtag(String etag) {
+ this.etag = etag;
+ return this;
+ }
+
+ public String getEtag() {
+ return etag;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeleteDisableLegacyAccessResponse that = (DeleteDisableLegacyAccessResponse) o;
+ return Objects.equals(etag, that.etag);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(etag);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteDisableLegacyAccessResponse.class).add("etag", etag).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyFeaturesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyFeaturesRequest.java
new file mode 100755
index 00000000..23fbc594
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyFeaturesRequest.java
@@ -0,0 +1,52 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+/** Delete the disable legacy features setting */
+@Generated
+public class DeleteDisableLegacyFeaturesRequest {
+ /**
+ * etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+ * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+ * overwriting each other. It is strongly suggested that systems make use of the etag in the read
+ * -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get
+ * an etag from a GET request, and pass it with the DELETE request to identify the rule set
+ * version you are deleting.
+ */
+ @JsonIgnore
+ @QueryParam("etag")
+ private String etag;
+
+ public DeleteDisableLegacyFeaturesRequest setEtag(String etag) {
+ this.etag = etag;
+ return this;
+ }
+
+ public String getEtag() {
+ return etag;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeleteDisableLegacyFeaturesRequest that = (DeleteDisableLegacyFeaturesRequest) o;
+ return Objects.equals(etag, that.etag);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(etag);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteDisableLegacyFeaturesRequest.class).add("etag", etag).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyFeaturesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyFeaturesResponse.java
new file mode 100755
index 00000000..dd7b8d8d
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DeleteDisableLegacyFeaturesResponse.java
@@ -0,0 +1,50 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** The etag is returned. */
+@Generated
+public class DeleteDisableLegacyFeaturesResponse {
+ /**
+ * etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+ * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+ * overwriting each other. It is strongly suggested that systems make use of the etag in the read
+ * -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get
+ * an etag from a GET request, and pass it with the DELETE request to identify the rule set
+ * version you are deleting.
+ */
+ @JsonProperty("etag")
+ private String etag;
+
+ public DeleteDisableLegacyFeaturesResponse setEtag(String etag) {
+ this.etag = etag;
+ return this;
+ }
+
+ public String getEtag() {
+ return etag;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeleteDisableLegacyFeaturesResponse that = (DeleteDisableLegacyFeaturesResponse) o;
+ return Objects.equals(etag, that.etag);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(etag);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteDisableLegacyFeaturesResponse.class).add("etag", etag).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyAccess.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyAccess.java
new file mode 100755
index 00000000..c9ba4ab6
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyAccess.java
@@ -0,0 +1,86 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class DisableLegacyAccess {
+ /** */
+ @JsonProperty("disable_legacy_access")
+ private BooleanMessage disableLegacyAccess;
+
+ /**
+ * etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+ * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+ * overwriting each other. It is strongly suggested that systems make use of the etag in the read
+ * -> update pattern to perform setting updates in order to avoid race conditions. That is, get an
+ * etag from a GET request, and pass it with the PATCH request to identify the setting version you
+ * are updating.
+ */
+ @JsonProperty("etag")
+ private String etag;
+
+ /**
+ * Name of the corresponding setting. This field is populated in the response, but it will not be
+ * respected even if it's set in the request body. The setting name in the path parameter will be
+ * respected instead. Setting name is required to be 'default' if the setting only has one
+ * instance per workspace.
+ */
+ @JsonProperty("setting_name")
+ private String settingName;
+
+ public DisableLegacyAccess setDisableLegacyAccess(BooleanMessage disableLegacyAccess) {
+ this.disableLegacyAccess = disableLegacyAccess;
+ return this;
+ }
+
+ public BooleanMessage getDisableLegacyAccess() {
+ return disableLegacyAccess;
+ }
+
+ public DisableLegacyAccess setEtag(String etag) {
+ this.etag = etag;
+ return this;
+ }
+
+ public String getEtag() {
+ return etag;
+ }
+
+ public DisableLegacyAccess setSettingName(String settingName) {
+ this.settingName = settingName;
+ return this;
+ }
+
+ public String getSettingName() {
+ return settingName;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DisableLegacyAccess that = (DisableLegacyAccess) o;
+ return Objects.equals(disableLegacyAccess, that.disableLegacyAccess)
+ && Objects.equals(etag, that.etag)
+ && Objects.equals(settingName, that.settingName);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(disableLegacyAccess, etag, settingName);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DisableLegacyAccess.class)
+ .add("disableLegacyAccess", disableLegacyAccess)
+ .add("etag", etag)
+ .add("settingName", settingName)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyAccessAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyAccessAPI.java
new file mode 100755
index 00000000..673a7b95
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyAccessAPI.java
@@ -0,0 +1,72 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.support.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * 'Disabling legacy access' has the following impacts:
+ *
+ * 1. Disables direct access to the Hive Metastore. However, you can still access Hive Metastore
+ * through HMS Federation. 2. Disables Fallback Mode (docs link) on any External Location access
+ * from the workspace. 3. Alters DBFS path access to use External Location permissions in place of
+ * legacy credentials. 4. Enforces Unity Catalog access on all path based access.
+ */
+@Generated
+public class DisableLegacyAccessAPI {
+ private static final Logger LOG = LoggerFactory.getLogger(DisableLegacyAccessAPI.class);
+
+ private final DisableLegacyAccessService impl;
+
+ /** Regular-use constructor */
+ public DisableLegacyAccessAPI(ApiClient apiClient) {
+ impl = new DisableLegacyAccessImpl(apiClient);
+ }
+
+ /** Constructor for mocks */
+ public DisableLegacyAccessAPI(DisableLegacyAccessService mock) {
+ impl = mock;
+ }
+
+ /**
+ * Delete Legacy Access Disablement Status.
+ *
+ * Deletes legacy access disablement status.
+ */
+ public DeleteDisableLegacyAccessResponse delete(DeleteDisableLegacyAccessRequest request) {
+ return impl.delete(request);
+ }
+
+ /**
+ * Retrieve Legacy Access Disablement Status.
+ *
 + * Retrieves legacy access disablement status.
+ */
+ public DisableLegacyAccess get(GetDisableLegacyAccessRequest request) {
+ return impl.get(request);
+ }
+
+ public DisableLegacyAccess update(
+ boolean allowMissing, DisableLegacyAccess setting, String fieldMask) {
+ return update(
+ new UpdateDisableLegacyAccessRequest()
+ .setAllowMissing(allowMissing)
+ .setSetting(setting)
+ .setFieldMask(fieldMask));
+ }
+
+ /**
+ * Update Legacy Access Disablement Status.
+ *
+ * Updates legacy access disablement status.
+ */
+ public DisableLegacyAccess update(UpdateDisableLegacyAccessRequest request) {
+ return impl.update(request);
+ }
+
+ public DisableLegacyAccessService impl() {
+ return impl;
+ }
+}
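The three-argument update overload above pairs with get for the read -> update pattern. A sketch under the same assumptions as the previous example; BooleanMessage is referenced but not shown in this diff, so its setValue(Boolean) setter is assumed from the generator's usual shape:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.settings.BooleanMessage;
import com.databricks.sdk.service.settings.DisableLegacyAccess;
import com.databricks.sdk.service.settings.GetDisableLegacyAccessRequest;

public class ReadThenUpdateSketch {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    DisableLegacyAccess current =
        w.settings().DisableLegacyAccess().get(new GetDisableLegacyAccessRequest());
    w.settings()
        .DisableLegacyAccess()
        .update(
            // allow_missing should always be true for the Settings API (see request javadoc).
            true,
            new DisableLegacyAccess()
                // Etag from the read identifies the setting version being updated.
                .setEtag(current.getEtag())
                .setDisableLegacyAccess(new BooleanMessage().setValue(true)),
            // Field mask: a single string; comma-separated (no spaces) for multiple fields.
            "disable_legacy_access");
  }
}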
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyAccessImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyAccessImpl.java
new file mode 100755
index 00000000..2c7380c9
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyAccessImpl.java
@@ -0,0 +1,42 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.support.Generated;
+import java.util.HashMap;
+import java.util.Map;
+
+/** Package-local implementation of DisableLegacyAccess */
+@Generated
+class DisableLegacyAccessImpl implements DisableLegacyAccessService {
+ private final ApiClient apiClient;
+
+ public DisableLegacyAccessImpl(ApiClient apiClient) {
+ this.apiClient = apiClient;
+ }
+
+ @Override
+ public DeleteDisableLegacyAccessResponse delete(DeleteDisableLegacyAccessRequest request) {
+ String path = "/api/2.0/settings/types/disable_legacy_access/names/default";
 + Map<String, String> headers = new HashMap<>();
 + headers.put("Accept", "application/json");
 + return apiClient.DELETE(path, request, DeleteDisableLegacyAccessResponse.class, headers);
 + }
 +
 + @Override
 + public DisableLegacyAccess get(GetDisableLegacyAccessRequest request) {
 + String path = "/api/2.0/settings/types/disable_legacy_access/names/default";
 + Map<String, String> headers = new HashMap<>();
 + headers.put("Accept", "application/json");
 + return apiClient.GET(path, request, DisableLegacyAccess.class, headers);
 + }
 +
 + @Override
 + public DisableLegacyAccess update(UpdateDisableLegacyAccessRequest request) {
 + String path = "/api/2.0/settings/types/disable_legacy_access/names/default";
 + Map<String, String> headers = new HashMap<>();
 + headers.put("Accept", "application/json");
 + headers.put("Content-Type", "application/json");
 + return apiClient.PATCH(path, request, DisableLegacyAccess.class, headers);
 + }
 +}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyAccessService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyAccessService.java
new file mode 100755
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyAccessService.java
 +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
 +package com.databricks.sdk.service.settings;
 +
 +import com.databricks.sdk.support.Generated;
 +
 +/**
 + * 'Disabling legacy access' has the following impacts:
 + *
 + * 1. Disables direct access to the Hive Metastore. However, you can still access Hive Metastore
+ * through HMS Federation. 2. Disables Fallback Mode (docs link) on any External Location access
+ * from the workspace. 3. Alters DBFS path access to use External Location permissions in place of
+ * legacy credentials. 4. Enforces Unity Catalog access on all path based access.
+ *
+ * This is the high-level interface, that contains generated methods.
+ *
+ * Evolving: this interface is under development. Method signatures may change.
+ */
+@Generated
+public interface DisableLegacyAccessService {
+ /**
+ * Delete Legacy Access Disablement Status.
+ *
+ * Deletes legacy access disablement status.
+ */
+ DeleteDisableLegacyAccessResponse delete(
+ DeleteDisableLegacyAccessRequest deleteDisableLegacyAccessRequest);
+
+ /**
+ * Retrieve Legacy Access Disablement Status.
+ *
 + * Retrieves legacy access disablement status.
+ */
+ DisableLegacyAccess get(GetDisableLegacyAccessRequest getDisableLegacyAccessRequest);
+
+ /**
+ * Update Legacy Access Disablement Status.
+ *
+ * Updates legacy access disablement status.
+ */
+ DisableLegacyAccess update(UpdateDisableLegacyAccessRequest updateDisableLegacyAccessRequest);
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyFeatures.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyFeatures.java
new file mode 100755
index 00000000..fed2ee95
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyFeatures.java
@@ -0,0 +1,86 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class DisableLegacyFeatures {
+ /** */
+ @JsonProperty("disable_legacy_features")
+ private BooleanMessage disableLegacyFeatures;
+
+ /**
+ * etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+ * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+ * overwriting each other. It is strongly suggested that systems make use of the etag in the read
+ * -> update pattern to perform setting updates in order to avoid race conditions. That is, get an
+ * etag from a GET request, and pass it with the PATCH request to identify the setting version you
+ * are updating.
+ */
+ @JsonProperty("etag")
+ private String etag;
+
+ /**
+ * Name of the corresponding setting. This field is populated in the response, but it will not be
+ * respected even if it's set in the request body. The setting name in the path parameter will be
+ * respected instead. Setting name is required to be 'default' if the setting only has one
+ * instance per workspace.
+ */
+ @JsonProperty("setting_name")
+ private String settingName;
+
+ public DisableLegacyFeatures setDisableLegacyFeatures(BooleanMessage disableLegacyFeatures) {
+ this.disableLegacyFeatures = disableLegacyFeatures;
+ return this;
+ }
+
+ public BooleanMessage getDisableLegacyFeatures() {
+ return disableLegacyFeatures;
+ }
+
+ public DisableLegacyFeatures setEtag(String etag) {
+ this.etag = etag;
+ return this;
+ }
+
+ public String getEtag() {
+ return etag;
+ }
+
+ public DisableLegacyFeatures setSettingName(String settingName) {
+ this.settingName = settingName;
+ return this;
+ }
+
+ public String getSettingName() {
+ return settingName;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DisableLegacyFeatures that = (DisableLegacyFeatures) o;
+ return Objects.equals(disableLegacyFeatures, that.disableLegacyFeatures)
+ && Objects.equals(etag, that.etag)
+ && Objects.equals(settingName, that.settingName);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(disableLegacyFeatures, etag, settingName);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DisableLegacyFeatures.class)
+ .add("disableLegacyFeatures", disableLegacyFeatures)
+ .add("etag", etag)
+ .add("settingName", settingName)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyFeaturesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyFeaturesAPI.java
new file mode 100755
index 00000000..1c5e5299
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyFeaturesAPI.java
@@ -0,0 +1,71 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.support.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Disable legacy features for new Databricks workspaces.
+ *
+ * For newly created workspaces: 1. Disables the use of DBFS root and mounts. 2. Hive Metastore
+ * will not be provisioned. 3. Disables the use of ‘No-isolation clusters’. 4. Disables Databricks
+ * Runtime versions prior to 13.3LTS.
+ */
+@Generated
+public class DisableLegacyFeaturesAPI {
+ private static final Logger LOG = LoggerFactory.getLogger(DisableLegacyFeaturesAPI.class);
+
+ private final DisableLegacyFeaturesService impl;
+
+ /** Regular-use constructor */
+ public DisableLegacyFeaturesAPI(ApiClient apiClient) {
+ impl = new DisableLegacyFeaturesImpl(apiClient);
+ }
+
+ /** Constructor for mocks */
+ public DisableLegacyFeaturesAPI(DisableLegacyFeaturesService mock) {
+ impl = mock;
+ }
+
+ /**
+ * Delete the disable legacy features setting.
+ *
+ * Deletes the disable legacy features setting.
+ */
+ public DeleteDisableLegacyFeaturesResponse delete(DeleteDisableLegacyFeaturesRequest request) {
+ return impl.delete(request);
+ }
+
+ /**
+ * Get the disable legacy features setting.
+ *
+ * Gets the value of the disable legacy features setting.
+ */
+ public DisableLegacyFeatures get(GetDisableLegacyFeaturesRequest request) {
+ return impl.get(request);
+ }
+
+ public DisableLegacyFeatures update(
+ boolean allowMissing, DisableLegacyFeatures setting, String fieldMask) {
+ return update(
+ new UpdateDisableLegacyFeaturesRequest()
+ .setAllowMissing(allowMissing)
+ .setSetting(setting)
+ .setFieldMask(fieldMask));
+ }
+
+ /**
+ * Update the disable legacy features setting.
+ *
+ * Updates the value of the disable legacy features setting.
+ */
+ public DisableLegacyFeatures update(UpdateDisableLegacyFeaturesRequest request) {
+ return impl.update(request);
+ }
+
+ public DisableLegacyFeaturesService impl() {
+ return impl;
+ }
+}
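DisableLegacyFeatures is an account-level setting (note the accounts/%s path in the Impl below), so the natural entry point is the account client. A sketch assuming a configured AccountClient whose settings() exposes a DisableLegacyFeatures() accessor analogous to the workspace-level one added later in this diff:

import com.databricks.sdk.AccountClient;
import com.databricks.sdk.service.settings.BooleanMessage;
import com.databricks.sdk.service.settings.DisableLegacyFeatures;
import com.databricks.sdk.service.settings.GetDisableLegacyFeaturesRequest;

public class AccountLevelSketch {
  public static void main(String[] args) {
    AccountClient a = new AccountClient();
    // Read first to obtain the current etag, then update that exact version.
    DisableLegacyFeatures current =
        a.settings().DisableLegacyFeatures().get(new GetDisableLegacyFeaturesRequest());
    a.settings()
        .DisableLegacyFeatures()
        .update(
            true, // allow_missing: always true for the Settings API
            new DisableLegacyFeatures()
                .setEtag(current.getEtag())
                .setDisableLegacyFeatures(new BooleanMessage().setValue(true)),
            "disable_legacy_features"); // single-field field mask
  }
}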
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyFeaturesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyFeaturesImpl.java
new file mode 100755
index 00000000..e039d940
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyFeaturesImpl.java
@@ -0,0 +1,51 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.support.Generated;
+import java.util.HashMap;
+import java.util.Map;
+
+/** Package-local implementation of DisableLegacyFeatures */
+@Generated
+class DisableLegacyFeaturesImpl implements DisableLegacyFeaturesService {
+ private final ApiClient apiClient;
+
+ public DisableLegacyFeaturesImpl(ApiClient apiClient) {
+ this.apiClient = apiClient;
+ }
+
+ @Override
+ public DeleteDisableLegacyFeaturesResponse delete(DeleteDisableLegacyFeaturesRequest request) {
+ String path =
+ String.format(
+ "/api/2.0/accounts/%s/settings/types/disable_legacy_features/names/default",
+ apiClient.configuredAccountID());
 + Map<String, String> headers = new HashMap<>();
 + headers.put("Accept", "application/json");
 + return apiClient.DELETE(path, request, DeleteDisableLegacyFeaturesResponse.class, headers);
 + }
 +
 + @Override
 + public DisableLegacyFeatures get(GetDisableLegacyFeaturesRequest request) {
 + String path =
 + String.format(
 + "/api/2.0/accounts/%s/settings/types/disable_legacy_features/names/default",
 + apiClient.configuredAccountID());
 + Map<String, String> headers = new HashMap<>();
 + headers.put("Accept", "application/json");
 + return apiClient.GET(path, request, DisableLegacyFeatures.class, headers);
 + }
 +
 + @Override
 + public DisableLegacyFeatures update(UpdateDisableLegacyFeaturesRequest request) {
 + String path =
 + String.format(
 + "/api/2.0/accounts/%s/settings/types/disable_legacy_features/names/default",
 + apiClient.configuredAccountID());
 + Map<String, String> headers = new HashMap<>();
 + headers.put("Accept", "application/json");
 + headers.put("Content-Type", "application/json");
 + return apiClient.PATCH(path, request, DisableLegacyFeatures.class, headers);
 + }
 +}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyFeaturesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyFeaturesService.java
new file mode 100755
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/DisableLegacyFeaturesService.java
 +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
 +package com.databricks.sdk.service.settings;
 +
 +import com.databricks.sdk.support.Generated;
 +
 +/**
 + * Disable legacy features for new Databricks workspaces.
 + *
 + * For newly created workspaces: 1. Disables the use of DBFS root and mounts. 2. Hive Metastore
+ * will not be provisioned. 3. Disables the use of ‘No-isolation clusters’. 4. Disables Databricks
+ * Runtime versions prior to 13.3LTS.
+ *
+ * This is the high-level interface, that contains generated methods.
+ *
+ * Evolving: this interface is under development. Method signatures may change.
+ */
+@Generated
+public interface DisableLegacyFeaturesService {
+ /**
+ * Delete the disable legacy features setting.
+ *
+ * Deletes the disable legacy features setting.
+ */
+ DeleteDisableLegacyFeaturesResponse delete(
+ DeleteDisableLegacyFeaturesRequest deleteDisableLegacyFeaturesRequest);
+
+ /**
+ * Get the disable legacy features setting.
+ *
+ * Gets the value of the disable legacy features setting.
+ */
+ DisableLegacyFeatures get(GetDisableLegacyFeaturesRequest getDisableLegacyFeaturesRequest);
+
+ /**
+ * Update the disable legacy features setting.
+ *
+ * Updates the value of the disable legacy features setting.
+ */
+ DisableLegacyFeatures update(
+ UpdateDisableLegacyFeaturesRequest updateDisableLegacyFeaturesRequest);
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDisableLegacyAccessRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDisableLegacyAccessRequest.java
new file mode 100755
index 00000000..75c0e920
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDisableLegacyAccessRequest.java
@@ -0,0 +1,52 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+/** Retrieve Legacy Access Disablement Status */
+@Generated
+public class GetDisableLegacyAccessRequest {
+ /**
+ * etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+ * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+ * overwriting each other. It is strongly suggested that systems make use of the etag in the read
+ * -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get
+ * an etag from a GET request, and pass it with the DELETE request to identify the rule set
+ * version you are deleting.
+ */
+ @JsonIgnore
+ @QueryParam("etag")
+ private String etag;
+
+ public GetDisableLegacyAccessRequest setEtag(String etag) {
+ this.etag = etag;
+ return this;
+ }
+
+ public String getEtag() {
+ return etag;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetDisableLegacyAccessRequest that = (GetDisableLegacyAccessRequest) o;
+ return Objects.equals(etag, that.etag);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(etag);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetDisableLegacyAccessRequest.class).add("etag", etag).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDisableLegacyFeaturesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDisableLegacyFeaturesRequest.java
new file mode 100755
index 00000000..edf3a313
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/GetDisableLegacyFeaturesRequest.java
@@ -0,0 +1,52 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+/** Get the disable legacy features setting */
+@Generated
+public class GetDisableLegacyFeaturesRequest {
+ /**
+ * etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+ * for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+ * overwriting each other. It is strongly suggested that systems make use of the etag in the read
+ * -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get
+ * an etag from a GET request, and pass it with the DELETE request to identify the rule set
+ * version you are deleting.
+ */
+ @JsonIgnore
+ @QueryParam("etag")
+ private String etag;
+
+ public GetDisableLegacyFeaturesRequest setEtag(String etag) {
+ this.etag = etag;
+ return this;
+ }
+
+ public String getEtag() {
+ return etag;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetDisableLegacyFeaturesRequest that = (GetDisableLegacyFeaturesRequest) o;
+ return Objects.equals(etag, that.etag);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(etag);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetDisableLegacyFeaturesRequest.class).add("etag", etag).toString();
+ }
+}
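On GET, the etag carries a freshness guarantee rather than a versioning role: the response is at least as fresh as the etag supplied. Continuing the account-level sketch above, where `a` is a configured AccountClient and `lastSeenEtag` came from an earlier response (both assumptions):

// Guarantees the returned setting reflects at least the version `lastSeenEtag`.
DisableLegacyFeatures fresh =
    a.settings()
        .DisableLegacyFeatures()
        .get(new GetDisableLegacyFeaturesRequest().setEtag(lastSeenEtag));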
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java
index 93cf8221..1fde6391 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/SettingsAPI.java
@@ -19,6 +19,8 @@ public class SettingsAPI {
private DefaultNamespaceAPI defaultNamespaceAPI;
+ private DisableLegacyAccessAPI disableLegacyAccessAPI;
+
private EnhancedSecurityMonitoringAPI enhancedSecurityMonitoringAPI;
private RestrictWorkspaceAdminsAPI restrictWorkspaceAdminsAPI;
@@ -33,6 +35,8 @@ public SettingsAPI(ApiClient apiClient) {
defaultNamespaceAPI = new DefaultNamespaceAPI(apiClient);
+ disableLegacyAccessAPI = new DisableLegacyAccessAPI(apiClient);
+
enhancedSecurityMonitoringAPI = new EnhancedSecurityMonitoringAPI(apiClient);
restrictWorkspaceAdminsAPI = new RestrictWorkspaceAdminsAPI(apiClient);
@@ -61,6 +65,11 @@ public DefaultNamespaceAPI DefaultNamespace() {
return defaultNamespaceAPI;
}
 + /** 'Disabling legacy access' disables direct Hive Metastore access, Fallback Mode, and legacy DBFS path credentials. */
+ public DisableLegacyAccessAPI DisableLegacyAccess() {
+ return disableLegacyAccessAPI;
+ }
+
/** Controls whether enhanced security monitoring is enabled for the current workspace. */
public EnhancedSecurityMonitoringAPI EnhancedSecurityMonitoring() {
return enhancedSecurityMonitoringAPI;
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenInfo.java
index 45e45a62..20c4a6bf 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenInfo.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenInfo.java
@@ -37,6 +37,10 @@ public class TokenInfo {
@JsonProperty("token_id")
private String tokenId;
+ /** If applicable, the ID of the workspace that the token was created in. */
+ @JsonProperty("workspace_id")
+ private Long workspaceId;
+
public TokenInfo setComment(String comment) {
this.comment = comment;
return this;
@@ -100,6 +104,15 @@ public String getTokenId() {
return tokenId;
}
+ public TokenInfo setWorkspaceId(Long workspaceId) {
+ this.workspaceId = workspaceId;
+ return this;
+ }
+
+ public Long getWorkspaceId() {
+ return workspaceId;
+ }
+
@Override
public boolean equals(Object o) {
if (this == o) return true;
@@ -111,13 +124,21 @@ public boolean equals(Object o) {
&& Objects.equals(creationTime, that.creationTime)
&& Objects.equals(expiryTime, that.expiryTime)
&& Objects.equals(ownerId, that.ownerId)
- && Objects.equals(tokenId, that.tokenId);
+ && Objects.equals(tokenId, that.tokenId)
+ && Objects.equals(workspaceId, that.workspaceId);
}
@Override
public int hashCode() {
return Objects.hash(
- comment, createdById, createdByUsername, creationTime, expiryTime, ownerId, tokenId);
+ comment,
+ createdById,
+ createdByUsername,
+ creationTime,
+ expiryTime,
+ ownerId,
+ tokenId,
+ workspaceId);
}
@Override
@@ -130,6 +151,7 @@ public String toString() {
.add("expiryTime", expiryTime)
.add("ownerId", ownerId)
.add("tokenId", tokenId)
+ .add("workspaceId", workspaceId)
.toString();
}
}
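The new workspace_id field is easiest to see when listing tokens. A sketch assuming the token management service's list(ListTokenManagementRequest) method from this same settings package; per the field's javadoc, workspaceId may be null where it does not apply:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.settings.ListTokenManagementRequest;
import com.databricks.sdk.service.settings.TokenInfo;

public class ListTokensSketch {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    for (TokenInfo token : w.tokenManagement().list(new ListTokenManagementRequest())) {
      // getWorkspaceId() returns a Long, or null if not applicable.
      System.out.printf("token=%s workspace=%s%n", token.getTokenId(), token.getWorkspaceId());
    }
  }
}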
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenType.java
index 55550bc3..23068a3e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenType.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/TokenType.java
@@ -7,5 +7,6 @@
/** The type of token request. As of now, only `AZURE_ACTIVE_DIRECTORY_TOKEN` is supported. */
@Generated
public enum TokenType {
+ ARCLIGHT_AZURE_EXCHANGE_TOKEN,
AZURE_ACTIVE_DIRECTORY_TOKEN,
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyAccessRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyAccessRequest.java
new file mode 100755
index 00000000..a90d8df9
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyAccessRequest.java
@@ -0,0 +1,79 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** Details required to update a setting. */
+@Generated
+public class UpdateDisableLegacyAccessRequest {
+ /** This should always be set to true for Settings API. Added for AIP compliance. */
+ @JsonProperty("allow_missing")
+ private Boolean allowMissing;
+
+ /**
+ * Field mask is required to be passed into the PATCH request. Field mask specifies which fields
 + * of the setting payload will be updated. The field mask needs to be supplied as a single string.
+ * To specify multiple fields in the field mask, use comma as the separator (no space).
+ */
+ @JsonProperty("field_mask")
+ private String fieldMask;
+
+ /** */
+ @JsonProperty("setting")
+ private DisableLegacyAccess setting;
+
+ public UpdateDisableLegacyAccessRequest setAllowMissing(Boolean allowMissing) {
+ this.allowMissing = allowMissing;
+ return this;
+ }
+
+ public Boolean getAllowMissing() {
+ return allowMissing;
+ }
+
+ public UpdateDisableLegacyAccessRequest setFieldMask(String fieldMask) {
+ this.fieldMask = fieldMask;
+ return this;
+ }
+
+ public String getFieldMask() {
+ return fieldMask;
+ }
+
+ public UpdateDisableLegacyAccessRequest setSetting(DisableLegacyAccess setting) {
+ this.setting = setting;
+ return this;
+ }
+
+ public DisableLegacyAccess getSetting() {
+ return setting;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdateDisableLegacyAccessRequest that = (UpdateDisableLegacyAccessRequest) o;
+ return Objects.equals(allowMissing, that.allowMissing)
+ && Objects.equals(fieldMask, that.fieldMask)
+ && Objects.equals(setting, that.setting);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(allowMissing, fieldMask, setting);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateDisableLegacyAccessRequest.class)
+ .add("allowMissing", allowMissing)
+ .add("fieldMask", fieldMask)
+ .add("setting", setting)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyFeaturesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyFeaturesRequest.java
new file mode 100755
index 00000000..c6c77c61
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/UpdateDisableLegacyFeaturesRequest.java
@@ -0,0 +1,79 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settings;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/** Details required to update a setting. */
+@Generated
+public class UpdateDisableLegacyFeaturesRequest {
+ /** This should always be set to true for Settings API. Added for AIP compliance. */
+ @JsonProperty("allow_missing")
+ private Boolean allowMissing;
+
+ /**
+ * Field mask is required to be passed into the PATCH request. Field mask specifies which fields
 + * of the setting payload will be updated. The field mask needs to be supplied as a single string.
+ * To specify multiple fields in the field mask, use comma as the separator (no space).
+ */
+ @JsonProperty("field_mask")
+ private String fieldMask;
+
+ /** */
+ @JsonProperty("setting")
+ private DisableLegacyFeatures setting;
+
+ public UpdateDisableLegacyFeaturesRequest setAllowMissing(Boolean allowMissing) {
+ this.allowMissing = allowMissing;
+ return this;
+ }
+
+ public Boolean getAllowMissing() {
+ return allowMissing;
+ }
+
+ public UpdateDisableLegacyFeaturesRequest setFieldMask(String fieldMask) {
+ this.fieldMask = fieldMask;
+ return this;
+ }
+
+ public String getFieldMask() {
+ return fieldMask;
+ }
+
+ public UpdateDisableLegacyFeaturesRequest setSetting(DisableLegacyFeatures setting) {
+ this.setting = setting;
+ return this;
+ }
+
+ public DisableLegacyFeatures getSetting() {
+ return setting;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdateDisableLegacyFeaturesRequest that = (UpdateDisableLegacyFeaturesRequest) o;
+ return Objects.equals(allowMissing, that.allowMissing)
+ && Objects.equals(fieldMask, that.fieldMask)
+ && Objects.equals(setting, that.setting);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(allowMissing, fieldMask, setting);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateDisableLegacyFeaturesRequest.class)
+ .add("allowMissing", allowMissing)
+ .add("fieldMask", fieldMask)
+ .add("setting", setting)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ChannelName.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ChannelName.java
index a1900b66..3b9d4cbb 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ChannelName.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ChannelName.java
@@ -10,4 +10,5 @@ public enum ChannelName {
CHANNEL_NAME_CUSTOM,
CHANNEL_NAME_PREVIEW,
CHANNEL_NAME_PREVIOUS,
+ CHANNEL_NAME_UNSPECIFIED,
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ClientCallContext.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ClientCallContext.java
deleted file mode 100755
index ecf5b32d..00000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/ClientCallContext.java
+++ /dev/null
@@ -1,59 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.sql;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import java.util.Objects;
-
-/** Client code that triggered the request */
-@Generated
-public class ClientCallContext {
- /** File name that contains the last line that triggered the request. */
- @JsonProperty("file_name")
- private EncodedText fileName;
-
- /** Last line number within a file or notebook cell that triggered the request. */
- @JsonProperty("line_number")
- private Long lineNumber;
-
- public ClientCallContext setFileName(EncodedText fileName) {
- this.fileName = fileName;
- return this;
- }
-
- public EncodedText getFileName() {
- return fileName;
- }
-
- public ClientCallContext setLineNumber(Long lineNumber) {
- this.lineNumber = lineNumber;
- return this;
- }
-
- public Long getLineNumber() {
- return lineNumber;
- }
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- ClientCallContext that = (ClientCallContext) o;
- return Objects.equals(fileName, that.fileName) && Objects.equals(lineNumber, that.lineNumber);
- }
-
- @Override
- public int hashCode() {
- return Objects.hash(fileName, lineNumber);
- }
-
- @Override
- public String toString() {
- return new ToStringer(ClientCallContext.class)
- .add("fileName", fileName)
- .add("lineNumber", lineNumber)
- .toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EncodedText.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EncodedText.java
deleted file mode 100755
index d9da6188..00000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/EncodedText.java
+++ /dev/null
@@ -1,55 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.sql;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import java.util.Objects;
-
-@Generated
-public class EncodedText {
- /** Carry text data in different form. */
- @JsonProperty("encoding")
- private EncodedTextEncoding encoding;
-
- /** text data */
- @JsonProperty("text")
- private String text;
-
- public EncodedText setEncoding(EncodedTextEncoding encoding) {
- this.encoding = encoding;
- return this;
- }
-
- public EncodedTextEncoding getEncoding() {
- return encoding;
- }
-
- public EncodedText setText(String text) {
- this.text = text;
- return this;
- }
-
- public String getText() {
- return text;
- }
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- EncodedText that = (EncodedText) o;
- return Objects.equals(encoding, that.encoding) && Objects.equals(text, that.text);
- }
-
- @Override
- public int hashCode() {
- return Objects.hash(encoding, text);
- }
-
- @Override
- public String toString() {
- return new ToStringer(EncodedText.class).add("encoding", encoding).add("text", text).toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryInfo.java
index 60002029..21f5bded 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryInfo.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QueryInfo.java
@@ -61,10 +61,6 @@ public class QueryInfo {
@JsonProperty("query_id")
private String queryId;
- /** */
- @JsonProperty("query_source")
- private QuerySource querySource;
-
/** The time the query started. */
@JsonProperty("query_start_time_ms")
private Long queryStartTimeMs;
@@ -224,15 +220,6 @@ public String getQueryId() {
return queryId;
}
- public QueryInfo setQuerySource(QuerySource querySource) {
- this.querySource = querySource;
- return this;
- }
-
- public QuerySource getQuerySource() {
- return querySource;
- }
-
public QueryInfo setQueryStartTimeMs(Long queryStartTimeMs) {
this.queryStartTimeMs = queryStartTimeMs;
return this;
@@ -332,7 +319,6 @@ public boolean equals(Object o) {
&& Objects.equals(plansState, that.plansState)
&& Objects.equals(queryEndTimeMs, that.queryEndTimeMs)
&& Objects.equals(queryId, that.queryId)
- && Objects.equals(querySource, that.querySource)
&& Objects.equals(queryStartTimeMs, that.queryStartTimeMs)
&& Objects.equals(queryText, that.queryText)
&& Objects.equals(rowsProduced, that.rowsProduced)
@@ -360,7 +346,6 @@ public int hashCode() {
plansState,
queryEndTimeMs,
queryId,
- querySource,
queryStartTimeMs,
queryText,
rowsProduced,
@@ -388,7 +373,6 @@ public String toString() {
.add("plansState", plansState)
.add("queryEndTimeMs", queryEndTimeMs)
.add("queryId", queryId)
- .add("querySource", querySource)
.add("queryStartTimeMs", queryStartTimeMs)
.add("queryText", queryText)
.add("rowsProduced", rowsProduced)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QuerySource.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QuerySource.java
deleted file mode 100755
index fb8eb705..00000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QuerySource.java
+++ /dev/null
@@ -1,353 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.sql;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import java.util.Objects;
-
-@Generated
-public class QuerySource {
- /** UUID */
- @JsonProperty("alert_id")
- private String alertId;
-
- /** Client code that triggered the request */
- @JsonProperty("client_call_context")
- private ClientCallContext clientCallContext;
-
- /** Id associated with a notebook cell */
- @JsonProperty("command_id")
- private String commandId;
-
- /** Id associated with a notebook run or execution */
- @JsonProperty("command_run_id")
- private String commandRunId;
-
- /** UUID */
- @JsonProperty("dashboard_id")
- private String dashboardId;
-
- /** UUID for Lakeview Dashboards, separate from DBSQL Dashboards (dashboard_id) */
- @JsonProperty("dashboard_v3_id")
- private String dashboardV3Id;
-
- /** */
- @JsonProperty("driver_info")
- private QuerySourceDriverInfo driverInfo;
-
- /** Spark service that received and processed the query */
- @JsonProperty("entry_point")
- private QuerySourceEntryPoint entryPoint;
-
- /** UUID for Genie space */
- @JsonProperty("genie_space_id")
- private String genieSpaceId;
-
- /** */
- @JsonProperty("is_cloud_fetch")
- private Boolean isCloudFetch;
-
- /** */
- @JsonProperty("is_databricks_sql_exec_api")
- private Boolean isDatabricksSqlExecApi;
-
- /** */
- @JsonProperty("job_id")
- private String jobId;
-
- /**
- * With background compute, jobs can be managed by different internal teams. When not specified,
- * not a background compute job When specified and the value is not JOBS, it is a background
- * compute job
- */
- @JsonProperty("job_managed_by")
- private QuerySourceJobManager jobManagedBy;
-
- /** */
- @JsonProperty("notebook_id")
- private String notebookId;
-
- /** String provided by a customer that'll help them identify the query */
- @JsonProperty("query_tags")
- private String queryTags;
-
- /** Id associated with a job run or execution */
- @JsonProperty("run_id")
- private String runId;
-
- /** Id associated with a notebook cell run or execution */
- @JsonProperty("runnable_command_id")
- private String runnableCommandId;
-
- /** */
- @JsonProperty("scheduled_by")
- private QuerySourceTrigger scheduledBy;
-
- /** */
- @JsonProperty("serverless_channel_info")
- private ServerlessChannelInfo serverlessChannelInfo;
-
- /** UUID */
- @JsonProperty("source_query_id")
- private String sourceQueryId;
-
- public QuerySource setAlertId(String alertId) {
- this.alertId = alertId;
- return this;
- }
-
- public String getAlertId() {
- return alertId;
- }
-
- public QuerySource setClientCallContext(ClientCallContext clientCallContext) {
- this.clientCallContext = clientCallContext;
- return this;
- }
-
- public ClientCallContext getClientCallContext() {
- return clientCallContext;
- }
-
- public QuerySource setCommandId(String commandId) {
- this.commandId = commandId;
- return this;
- }
-
- public String getCommandId() {
- return commandId;
- }
-
- public QuerySource setCommandRunId(String commandRunId) {
- this.commandRunId = commandRunId;
- return this;
- }
-
- public String getCommandRunId() {
- return commandRunId;
- }
-
- public QuerySource setDashboardId(String dashboardId) {
- this.dashboardId = dashboardId;
- return this;
- }
-
- public String getDashboardId() {
- return dashboardId;
- }
-
- public QuerySource setDashboardV3Id(String dashboardV3Id) {
- this.dashboardV3Id = dashboardV3Id;
- return this;
- }
-
- public String getDashboardV3Id() {
- return dashboardV3Id;
- }
-
- public QuerySource setDriverInfo(QuerySourceDriverInfo driverInfo) {
- this.driverInfo = driverInfo;
- return this;
- }
-
- public QuerySourceDriverInfo getDriverInfo() {
- return driverInfo;
- }
-
- public QuerySource setEntryPoint(QuerySourceEntryPoint entryPoint) {
- this.entryPoint = entryPoint;
- return this;
- }
-
- public QuerySourceEntryPoint getEntryPoint() {
- return entryPoint;
- }
-
- public QuerySource setGenieSpaceId(String genieSpaceId) {
- this.genieSpaceId = genieSpaceId;
- return this;
- }
-
- public String getGenieSpaceId() {
- return genieSpaceId;
- }
-
- public QuerySource setIsCloudFetch(Boolean isCloudFetch) {
- this.isCloudFetch = isCloudFetch;
- return this;
- }
-
- public Boolean getIsCloudFetch() {
- return isCloudFetch;
- }
-
- public QuerySource setIsDatabricksSqlExecApi(Boolean isDatabricksSqlExecApi) {
- this.isDatabricksSqlExecApi = isDatabricksSqlExecApi;
- return this;
- }
-
- public Boolean getIsDatabricksSqlExecApi() {
- return isDatabricksSqlExecApi;
- }
-
- public QuerySource setJobId(String jobId) {
- this.jobId = jobId;
- return this;
- }
-
- public String getJobId() {
- return jobId;
- }
-
- public QuerySource setJobManagedBy(QuerySourceJobManager jobManagedBy) {
- this.jobManagedBy = jobManagedBy;
- return this;
- }
-
- public QuerySourceJobManager getJobManagedBy() {
- return jobManagedBy;
- }
-
- public QuerySource setNotebookId(String notebookId) {
- this.notebookId = notebookId;
- return this;
- }
-
- public String getNotebookId() {
- return notebookId;
- }
-
- public QuerySource setQueryTags(String queryTags) {
- this.queryTags = queryTags;
- return this;
- }
-
- public String getQueryTags() {
- return queryTags;
- }
-
- public QuerySource setRunId(String runId) {
- this.runId = runId;
- return this;
- }
-
- public String getRunId() {
- return runId;
- }
-
- public QuerySource setRunnableCommandId(String runnableCommandId) {
- this.runnableCommandId = runnableCommandId;
- return this;
- }
-
- public String getRunnableCommandId() {
- return runnableCommandId;
- }
-
- public QuerySource setScheduledBy(QuerySourceTrigger scheduledBy) {
- this.scheduledBy = scheduledBy;
- return this;
- }
-
- public QuerySourceTrigger getScheduledBy() {
- return scheduledBy;
- }
-
- public QuerySource setServerlessChannelInfo(ServerlessChannelInfo serverlessChannelInfo) {
- this.serverlessChannelInfo = serverlessChannelInfo;
- return this;
- }
-
- public ServerlessChannelInfo getServerlessChannelInfo() {
- return serverlessChannelInfo;
- }
-
- public QuerySource setSourceQueryId(String sourceQueryId) {
- this.sourceQueryId = sourceQueryId;
- return this;
- }
-
- public String getSourceQueryId() {
- return sourceQueryId;
- }
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- QuerySource that = (QuerySource) o;
- return Objects.equals(alertId, that.alertId)
- && Objects.equals(clientCallContext, that.clientCallContext)
- && Objects.equals(commandId, that.commandId)
- && Objects.equals(commandRunId, that.commandRunId)
- && Objects.equals(dashboardId, that.dashboardId)
- && Objects.equals(dashboardV3Id, that.dashboardV3Id)
- && Objects.equals(driverInfo, that.driverInfo)
- && Objects.equals(entryPoint, that.entryPoint)
- && Objects.equals(genieSpaceId, that.genieSpaceId)
- && Objects.equals(isCloudFetch, that.isCloudFetch)
- && Objects.equals(isDatabricksSqlExecApi, that.isDatabricksSqlExecApi)
- && Objects.equals(jobId, that.jobId)
- && Objects.equals(jobManagedBy, that.jobManagedBy)
- && Objects.equals(notebookId, that.notebookId)
- && Objects.equals(queryTags, that.queryTags)
- && Objects.equals(runId, that.runId)
- && Objects.equals(runnableCommandId, that.runnableCommandId)
- && Objects.equals(scheduledBy, that.scheduledBy)
- && Objects.equals(serverlessChannelInfo, that.serverlessChannelInfo)
- && Objects.equals(sourceQueryId, that.sourceQueryId);
- }
-
- @Override
- public int hashCode() {
- return Objects.hash(
- alertId,
- clientCallContext,
- commandId,
- commandRunId,
- dashboardId,
- dashboardV3Id,
- driverInfo,
- entryPoint,
- genieSpaceId,
- isCloudFetch,
- isDatabricksSqlExecApi,
- jobId,
- jobManagedBy,
- notebookId,
- queryTags,
- runId,
- runnableCommandId,
- scheduledBy,
- serverlessChannelInfo,
- sourceQueryId);
- }
-
- @Override
- public String toString() {
- return new ToStringer(QuerySource.class)
- .add("alertId", alertId)
- .add("clientCallContext", clientCallContext)
- .add("commandId", commandId)
- .add("commandRunId", commandRunId)
- .add("dashboardId", dashboardId)
- .add("dashboardV3Id", dashboardV3Id)
- .add("driverInfo", driverInfo)
- .add("entryPoint", entryPoint)
- .add("genieSpaceId", genieSpaceId)
- .add("isCloudFetch", isCloudFetch)
- .add("isDatabricksSqlExecApi", isDatabricksSqlExecApi)
- .add("jobId", jobId)
- .add("jobManagedBy", jobManagedBy)
- .add("notebookId", notebookId)
- .add("queryTags", queryTags)
- .add("runId", runId)
- .add("runnableCommandId", runnableCommandId)
- .add("scheduledBy", scheduledBy)
- .add("serverlessChannelInfo", serverlessChannelInfo)
- .add("sourceQueryId", sourceQueryId)
- .toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QuerySourceDriverInfo.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QuerySourceDriverInfo.java
deleted file mode 100755
index 6d6897cc..00000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QuerySourceDriverInfo.java
+++ /dev/null
@@ -1,89 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.sql;
-
-import com.databricks.sdk.support.Generated;
-import com.databricks.sdk.support.ToStringer;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import java.util.Objects;
-
-@Generated
-public class QuerySourceDriverInfo {
- /** */
- @JsonProperty("bi_tool_entry")
- private String biToolEntry;
-
- /** */
- @JsonProperty("driver_name")
- private String driverName;
-
- /** */
- @JsonProperty("simba_branding_vendor")
- private String simbaBrandingVendor;
-
- /** */
- @JsonProperty("version_number")
- private String versionNumber;
-
- public QuerySourceDriverInfo setBiToolEntry(String biToolEntry) {
- this.biToolEntry = biToolEntry;
- return this;
- }
-
- public String getBiToolEntry() {
- return biToolEntry;
- }
-
- public QuerySourceDriverInfo setDriverName(String driverName) {
- this.driverName = driverName;
- return this;
- }
-
- public String getDriverName() {
- return driverName;
- }
-
- public QuerySourceDriverInfo setSimbaBrandingVendor(String simbaBrandingVendor) {
- this.simbaBrandingVendor = simbaBrandingVendor;
- return this;
- }
-
- public String getSimbaBrandingVendor() {
- return simbaBrandingVendor;
- }
-
- public QuerySourceDriverInfo setVersionNumber(String versionNumber) {
- this.versionNumber = versionNumber;
- return this;
- }
-
- public String getVersionNumber() {
- return versionNumber;
- }
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- QuerySourceDriverInfo that = (QuerySourceDriverInfo) o;
- return Objects.equals(biToolEntry, that.biToolEntry)
- && Objects.equals(driverName, that.driverName)
- && Objects.equals(simbaBrandingVendor, that.simbaBrandingVendor)
- && Objects.equals(versionNumber, that.versionNumber);
- }
-
- @Override
- public int hashCode() {
- return Objects.hash(biToolEntry, driverName, simbaBrandingVendor, versionNumber);
- }
-
- @Override
- public String toString() {
- return new ToStringer(QuerySourceDriverInfo.class)
- .add("biToolEntry", biToolEntry)
- .add("driverName", driverName)
- .add("simbaBrandingVendor", simbaBrandingVendor)
- .add("versionNumber", versionNumber)
- .toString();
- }
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QuerySourceEntryPoint.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QuerySourceEntryPoint.java
deleted file mode 100755
index 6fa26f55..00000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QuerySourceEntryPoint.java
+++ /dev/null
@@ -1,13 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.sql;
-
-import com.databricks.sdk.support.Generated;
-
-/** Spark service that received and processed the query */
-@Generated
-public enum QuerySourceEntryPoint {
- DLT,
- SPARK_CONNECT,
- THRIFT_SERVER,
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QuerySourceJobManager.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QuerySourceJobManager.java
deleted file mode 100755
index b5492fff..00000000
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/QuerySourceJobManager.java
+++ /dev/null
@@ -1,26 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package com.databricks.sdk.service.sql;
-
-import com.databricks.sdk.support.Generated;
-
-/**
- * Copied from elastic-spark-common/api/messages/manager.proto with enum values changed by 1 to
- * accommodate JOB_MANAGER_UNSPECIFIED
- */
-@Generated
-public enum QuerySourceJobManager {
- APP_SYSTEM_TABLE,
- AUTOML,
- AUTO_MAINTENANCE,
- CLEAN_ROOMS,
- DATA_MONITORING,
- DATA_SHARING,
- ENCRYPTION,
- FABRIC_CRAWLER,
- JOBS,
- LAKEVIEW,
- MANAGED_RAG,
- SCHEDULED_MV_REFRESH,
- TESTING,
-}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionAPI.java
index f6524775..8e579b47 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionAPI.java
@@ -89,7 +89,9 @@
* server-side, and cannot account for things such as caller delays and network latency from caller
* to service. - The system will auto-close a statement after one hour if the client stops polling
* and thus you must poll at least once an hour. - The results are only available for one hour after
- * success; polling does not extend this.
+ * success; polling does not extend this. - The SQL Execution API must be used for the entire
+ * lifecycle of the statement. For example, you cannot use the Jobs API to execute the command, and
+ * then the SQL Execution API to cancel it.
*
* [Apache Arrow Columnar]: https://arrow.apache.org/overview/ [Databricks SQL Statement
* Execution API tutorial]: https://docs.databricks.com/sql/api/sql-execution-tutorial.html
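The lifecycle constraint added above means create, poll, and cancel must all go through this API. A sketch assuming a configured WorkspaceClient and the generated ExecuteStatementRequest / CancelExecutionRequest types of this sql package; the warehouse id is a placeholder, and `var` (Java 10+) keeps the sketch independent of the generated response type name:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.sql.CancelExecutionRequest;
import com.databricks.sdk.service.sql.ExecuteStatementRequest;

public class StatementLifecycleSketch {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();
    var response =
        w.statementExecution()
            .executeStatement(
                new ExecuteStatementRequest()
                    .setWarehouseId("<warehouse-id>") // placeholder, not from this diff
                    .setStatement("SELECT 1")
                    .setWaitTimeout("0s")); // return immediately, then poll
    // Cancellation goes through the same API that created the statement,
    // never through a different API such as Jobs.
    w.statementExecution()
        .cancelExecution(new CancelExecutionRequest().setStatementId(response.getStatementId()));
  }
}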
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionService.java
index 4cbc13e7..b22e9dbe 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sql/StatementExecutionService.java
@@ -86,7 +86,9 @@
* server-side, and cannot account for things such as caller delays and network latency from caller
* to service. - The system will auto-close a statement after one hour if the client stops polling
* and thus you must poll at least once an hour. - The results are only available for one hour after
- * success; polling does not extend this.
+ * success; polling does not extend this. - The SQL Execution API must be used for the entire
+ * lifecycle of the statement. For example, you cannot use the Jobs API to execute the command, and
+ * then the SQL Execution API to cancel it.
*
* [Apache Arrow Columnar]: https://arrow.apache.org/overview/ [Databricks SQL Statement
* Execution API tutorial]: https://docs.databricks.com/sql/api/sql-execution-tutorial.html