diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha
index 62eb1dbba..3361d4966 100644
--- a/.codegen/_openapi_sha
+++ b/.codegen/_openapi_sha
@@ -1 +1 @@
-69902d1abe35bd9e78e0231927bf14d11b383a16
\ No newline at end of file
+file:/Users/tanmay.rustagi/emu/universe/bazel-bin/openapi/all-internal.json
\ No newline at end of file
diff --git a/.gitattributes b/.gitattributes
index 0b993184b..4f85f95e8 100755
--- a/.gitattributes
+++ b/.gitattributes
@@ -157,6 +157,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/UsageDashbo
databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedCreateLogDeliveryConfiguration.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedLogDeliveryConfiguration.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/billing/WrappedLogDeliveryConfigurations.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccessRequestDestinations.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsAPI.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsImpl.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccountMetastoreAssignmentsService.java linguist-generated=true
@@ -193,6 +194,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureManage
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureQueueStorage.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureServicePrincipal.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AzureUserDelegationSas.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/BatchCreateAccessRequestsRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/BatchCreateAccessRequestsResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CancelRefreshRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogInfo.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CatalogIsolationMode.java linguist-generated=true
@@ -212,6 +215,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Connections
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsImpl.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ConnectionsService.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ContinuousUpdateStatus.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateAccessRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateAccessRequestResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCatalog.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateConnection.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateCredentialRequest.java linguist-generated=true
@@ -272,6 +277,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeltaRuntim
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DeltaSharingScopeEnum.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Dependency.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DependencyList.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DestinationType.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DisableRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePermissionsList.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/EffectivePredictiveOptimizationFlag.java linguist-generated=true
@@ -329,6 +335,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTem
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryServiceCredentialRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryTableCredentialRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GenerateTemporaryTableCredentialResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccessRequestDestinationsRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountMetastoreAssignmentRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountMetastoreRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccountStorageCredentialRequest.java linguist-generated=true
@@ -432,6 +439,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorRefr
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorSnapshot.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/MonitorTimeSeries.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/NamedTableConstraint.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/NotificationDestination.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTable.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTableSpec.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OnlineTableSpecContinuousSchedulingPolicy.java linguist-generated=true
@@ -447,6 +455,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/OptionSpecO
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PermissionsChange.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PipelineProgress.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PrimaryKeyConstraint.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Principal.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PrincipalType.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Privilege.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PrivilegeAssignment.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ProvisioningInfo.java linguist-generated=true
@@ -465,6 +475,9 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredM
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelsAPI.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelsImpl.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RegisteredModelsService.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RequestForAccessAPI.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RequestForAccessImpl.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RequestForAccessService.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ResourceQuotasAPI.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ResourceQuotasImpl.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/ResourceQuotasService.java linguist-generated=true
@@ -473,11 +486,15 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemaInfo.
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasAPI.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasImpl.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SchemasService.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Securable.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKind.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableKindManifest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableNotificationDestinations.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurablePermissions.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableType.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SetArtifactAllowlist.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SetRegisteredModelAliasRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SpecialDestination.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SseEncryptionDetails.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SseEncryptionDetailsAlgorithm.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/StorageCredentialInfo.java linguist-generated=true
@@ -510,6 +527,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTa
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TemporaryTableCredentialsService.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/TriggeredUpdateStatus.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UnassignRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateAccessRequestDestinationsRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatalog.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateCatalogWorkspaceBindingsResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateConnection.java linguist-generated=true
@@ -657,6 +675,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ContextStat
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateCluster.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateClusterResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateContext.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateDefaultBaseEnvironmentRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateInstancePool.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateInstancePoolResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreatePolicy.java linguist-generated=true
@@ -668,7 +687,11 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DataPlaneEv
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DataPlaneEventDetailsEventType.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DataSecurityMode.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DbfsStorageInfo.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DefaultBaseEnvironment.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DefaultBaseEnvironmentCache.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DefaultBaseEnvironmentCacheStatus.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteCluster.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteDefaultBaseEnvironmentRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteGlobalInitScriptRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteInstancePool.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeletePolicy.java linguist-generated=true
@@ -767,6 +790,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListCluster
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClustersSortBy.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClustersSortByDirection.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListClustersSortByField.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListDefaultBaseEnvironmentsRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListDefaultBaseEnvironmentsResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListGlobalInitScriptsResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListInstancePools.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListInstanceProfilesResponse.java linguist-generated=true
@@ -779,9 +804,11 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListSortOrd
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LocalFileInfo.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LogAnalyticsInfo.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LogSyncStatus.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/MaterializedEnvironment.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/MavenLibrary.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/NodeInstanceType.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/NodeType.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/NodeTypeFlexibility.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PendingInstanceError.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PermanentDeleteCluster.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PinCluster.java linguist-generated=true
@@ -795,6 +822,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyFamil
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PolicyFamily.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/PythonPyPiLibrary.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RCranLibrary.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RefreshDefaultBaseEnvironmentsRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RemoveInstanceProfile.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ResizeCluster.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RestartCluster.java linguist-generated=true
@@ -814,11 +842,15 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UninstallLi
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UnpinCluster.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateCluster.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateClusterResource.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateDefaultBaseEnvironmentRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/VolumesStorageInfo.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/WorkloadType.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/WorkspaceStorageInfo.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/AuthorizationDetails.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/AuthorizationDetailsGrantRule.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelPublishedQueryExecutionRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelQueryExecutionResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelQueryExecutionResponseStatus.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateDashboardRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateScheduleRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CreateSubscriptionRequest.java linguist-generated=true
@@ -827,6 +859,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Dashboar
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DashboardView.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteScheduleRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/DeleteSubscriptionRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Empty.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ExecutePublishedDashboardQueryRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAttachment.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieConversation.java linguist-generated=true
@@ -835,7 +869,11 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieCre
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieDeleteConversationRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieExecuteMessageAttachmentQueryRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieExecuteMessageQueryRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGenerateDownloadFullQueryResultRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGenerateDownloadFullQueryResultResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetConversationMessageRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetDownloadFullQueryResultRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetDownloadFullQueryResultResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetMessageAttachmentQueryResultRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetMessageQueryResultRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetMessageQueryResultResponse.java linguist-generated=true
@@ -855,6 +893,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieSta
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieStartConversationResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieTrashSpaceRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetDashboardRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardEmbeddedRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardTokenInfoRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardTokenInfoResponse.java linguist-generated=true
@@ -877,8 +916,16 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageE
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageErrorType.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MessageStatus.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/MigrateDashboardRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PendingStatus.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollPublishedQueryStatusRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollQueryStatusResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollQueryStatusResponseData.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PublishRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PublishedDashboard.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionAPI.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionImpl.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionService.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryResponseStatus.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Result.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Schedule.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SchedulePauseStatus.java linguist-generated=true
@@ -886,6 +933,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Subscrib
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Subscription.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SubscriptionSubscriberDestination.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SubscriptionSubscriberUser.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SuccessStatus.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TextAttachment.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/TrashDashboardRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/UnpublishDashboardRequest.java linguist-generated=true
@@ -915,6 +963,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteData
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteDatabaseTableRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeleteSyncedDatabaseTableRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DeltaTableSyncInfo.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/FailoverDatabaseInstanceRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/FindDatabaseInstanceByUidRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GenerateDatabaseCredentialRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseCatalogRequest.java linguist-generated=true
@@ -922,10 +971,14 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabas
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseInstanceRoleRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetDatabaseTableRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/GetSyncedDatabaseTableRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseCatalogsRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseCatalogsResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseInstanceRolesRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseInstanceRolesResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseInstancesRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseInstancesResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListSyncedDatabaseTablesRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListSyncedDatabaseTablesResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/NewPipelineSpec.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ProvisioningInfoState.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/RequestedClaims.java linguist-generated=true
@@ -942,7 +995,9 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTabl
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableState.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableStatus.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedTableTriggeredUpdateStatus.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseCatalogRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseInstanceRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateSyncedDatabaseTableRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/AddBlock.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/Close.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/files/Create.java linguist-generated=true
@@ -1189,6 +1244,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListJobsReques
databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListJobsResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListRunsRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ListRunsResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ModelTriggerConfiguration.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ModelTriggerConfigurationCondition.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/NotebookOutput.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/NotebookTask.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/OutputSchemaInfo.java linguist-generated=true
@@ -1268,6 +1325,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Task.java ling
databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TaskDependency.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TaskEmailNotifications.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TaskNotificationSettings.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TaskRetryMode.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationCodeCode.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationDetails.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TerminationTypeType.java linguist-generated=true
@@ -1532,6 +1590,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLatestVersion
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLatestVersionsResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLoggedModelRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLoggedModelResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLoggedModelsRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLoggedModelsRequestResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetMetricHistoryResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetModelRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetModelResponse.java linguist-generated=true
@@ -1786,6 +1846,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Pipelines
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesService.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/ReportSpec.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestartWindow.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestorePipelineRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestorePipelineRequestResponse.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RunAs.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/SchemaSpec.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/Sequencing.java linguist-generated=true
@@ -1883,6 +1945,7 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/Worksp
databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesImpl.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/provisioning/WorkspacesService.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionConfig.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionJobType.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionRunStatus.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/CreateQualityMonitorRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/DeleteQualityMonitorRequest.java linguist-generated=true
@@ -2335,6 +2398,20 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceN
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceNetworkConfigurationImpl.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceNetworkConfigurationService.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/settings/WorkspaceNetworkOption.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/AccountSettingsV2API.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/AccountSettingsV2Impl.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/AccountSettingsV2Service.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/BooleanMessage.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/GetPublicAccountSettingRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/GetPublicWorkspaceSettingRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/IntegerMessage.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/PatchPublicAccountSettingRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/PatchPublicWorkspaceSettingRequest.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/Setting.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/StringMessage.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/WorkspaceSettingsV2API.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/WorkspaceSettingsV2Impl.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/WorkspaceSettingsV2Service.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/AuthenticationType.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/ColumnTypeName.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/CreateFederationPolicyRequest.java linguist-generated=true
@@ -2708,6 +2785,8 @@ databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/Pipeli
databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/QueryVectorIndexNextPageRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/QueryVectorIndexRequest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/QueryVectorIndexResponse.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/RerankerConfig.java linguist-generated=true
+databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/RerankerConfigRerankerParameters.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ResultData.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ResultManifest.java linguist-generated=true
databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/ScanVectorIndexRequest.java linguist-generated=true
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java
index ce93c4612..44bfd8e8f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/AccountClient.java
@@ -69,6 +69,8 @@
import com.databricks.sdk.service.settings.NetworkPoliciesService;
import com.databricks.sdk.service.settings.WorkspaceNetworkConfigurationAPI;
import com.databricks.sdk.service.settings.WorkspaceNetworkConfigurationService;
+import com.databricks.sdk.service.settingsv2.AccountSettingsV2API;
+import com.databricks.sdk.service.settingsv2.AccountSettingsV2Service;
import com.databricks.sdk.support.Generated;
/** Entry point for accessing Databricks account-level APIs */
@@ -99,6 +101,7 @@ public class AccountClient {
private ServicePrincipalSecretsAPI servicePrincipalSecretsAPI;
private AccountServicePrincipalsAPI servicePrincipalsAPI;
private AccountSettingsAPI settingsAPI;
+ private AccountSettingsV2API settingsV2API;
private StorageAPI storageAPI;
private AccountStorageCredentialsAPI storageCredentialsAPI;
private UsageDashboardsAPI usageDashboardsAPI;
@@ -139,6 +142,7 @@ public AccountClient(DatabricksConfig config) {
servicePrincipalSecretsAPI = new ServicePrincipalSecretsAPI(apiClient);
servicePrincipalsAPI = new AccountServicePrincipalsAPI(apiClient);
settingsAPI = new AccountSettingsAPI(apiClient);
+ settingsV2API = new AccountSettingsV2API(apiClient);
storageAPI = new StorageAPI(apiClient);
storageCredentialsAPI = new AccountStorageCredentialsAPI(apiClient);
usageDashboardsAPI = new UsageDashboardsAPI(apiClient);
@@ -524,6 +528,11 @@ public AccountSettingsAPI settings() {
return settingsAPI;
}
+ /** APIs to manage account level settings */
+ public AccountSettingsV2API settingsV2() {
+ return settingsV2API;
+ }
+
/**
* These APIs manage storage configurations for this workspace. A root storage S3 bucket in your
* account is required to store objects like cluster logs, notebook revisions, and job results.
@@ -869,6 +878,17 @@ public AccountClient withSettingsAPI(AccountSettingsAPI accountSettings) {
return this;
}
+ /** Replace the default AccountSettingsV2Service with a custom implementation. */
+ public AccountClient withSettingsV2Impl(AccountSettingsV2Service accountSettingsV2) {
+ return this.withSettingsV2API(new AccountSettingsV2API(accountSettingsV2));
+ }
+
+ /** Replace the default AccountSettingsV2API with a custom implementation. */
+ public AccountClient withSettingsV2API(AccountSettingsV2API accountSettingsV2) {
+ this.settingsV2API = accountSettingsV2;
+ return this;
+ }
+
/** Replace the default StorageService with a custom implementation. */
public AccountClient withStorageImpl(StorageService storage) {
return this.withStorageAPI(new StorageAPI(storage));
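A minimal usage sketch of the account-level wiring added above, assuming default environment-based authentication; it only exercises the new settingsV2() accessor and notes the override hooks, since the AccountSettingsV2API method signatures are not part of this diff:

import com.databricks.sdk.AccountClient;
import com.databricks.sdk.service.settingsv2.AccountSettingsV2API;

public class AccountSettingsV2Sketch {
  public static void main(String[] args) {
    // Picks up credentials from the environment or ~/.databrickscfg, as with any AccountClient.
    AccountClient account = new AccountClient();

    // New accessor added in this diff for the account-level settings v2 APIs.
    AccountSettingsV2API settingsV2 = account.settingsV2();
    System.out.println(settingsV2);

    // In tests, the new hooks allow substituting a custom AccountSettingsV2Service, e.g.:
    //   account.withSettingsV2Impl(myAccountSettingsV2Service);   // myAccountSettingsV2Service is hypothetical
  }
}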
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java
index 5bac999b6..17c6fbb52 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/WorkspaceClient.java
@@ -40,6 +40,8 @@
import com.databricks.sdk.service.catalog.QualityMonitorsService;
import com.databricks.sdk.service.catalog.RegisteredModelsAPI;
import com.databricks.sdk.service.catalog.RegisteredModelsService;
+import com.databricks.sdk.service.catalog.RequestForAccessAPI;
+import com.databricks.sdk.service.catalog.RequestForAccessService;
import com.databricks.sdk.service.catalog.ResourceQuotasAPI;
import com.databricks.sdk.service.catalog.ResourceQuotasService;
import com.databricks.sdk.service.catalog.SchemasAPI;
@@ -87,6 +89,8 @@
import com.databricks.sdk.service.dashboards.LakeviewEmbeddedAPI;
import com.databricks.sdk.service.dashboards.LakeviewEmbeddedService;
import com.databricks.sdk.service.dashboards.LakeviewService;
+import com.databricks.sdk.service.dashboards.QueryExecutionAPI;
+import com.databricks.sdk.service.dashboards.QueryExecutionService;
import com.databricks.sdk.service.database.DatabaseAPI;
import com.databricks.sdk.service.database.DatabaseService;
import com.databricks.sdk.service.files.DbfsService;
@@ -170,6 +174,8 @@
import com.databricks.sdk.service.settings.TokensService;
import com.databricks.sdk.service.settings.WorkspaceConfAPI;
import com.databricks.sdk.service.settings.WorkspaceConfService;
+import com.databricks.sdk.service.settingsv2.WorkspaceSettingsV2API;
+import com.databricks.sdk.service.settingsv2.WorkspaceSettingsV2Service;
import com.databricks.sdk.service.sharing.ProvidersAPI;
import com.databricks.sdk.service.sharing.ProvidersService;
import com.databricks.sdk.service.sharing.RecipientActivationAPI;
@@ -302,6 +308,7 @@ public class WorkspaceClient {
private QualityMonitorsAPI qualityMonitorsAPI;
private QueriesAPI queriesAPI;
private QueriesLegacyAPI queriesLegacyAPI;
+ private QueryExecutionAPI queryExecutionAPI;
private QueryHistoryAPI queryHistoryAPI;
private QueryVisualizationsAPI queryVisualizationsAPI;
private QueryVisualizationsLegacyAPI queryVisualizationsLegacyAPI;
@@ -311,6 +318,7 @@ public class WorkspaceClient {
private RedashConfigAPI redashConfigAPI;
private RegisteredModelsAPI registeredModelsAPI;
private ReposAPI reposAPI;
+ private RequestForAccessAPI requestForAccessAPI;
private ResourceQuotasAPI resourceQuotasAPI;
private SchemasAPI schemasAPI;
private SecretsExt secretsAPI;
@@ -336,6 +344,7 @@ public class WorkspaceClient {
private WorkspaceAPI workspaceAPI;
private WorkspaceBindingsAPI workspaceBindingsAPI;
private WorkspaceConfAPI workspaceConfAPI;
+ private WorkspaceSettingsV2API workspaceSettingsV2API;
private ForecastingAPI forecastingAPI;
public WorkspaceClient() {
@@ -418,6 +427,7 @@ public WorkspaceClient(DatabricksConfig config) {
qualityMonitorsAPI = new QualityMonitorsAPI(apiClient);
queriesAPI = new QueriesAPI(apiClient);
queriesLegacyAPI = new QueriesLegacyAPI(apiClient);
+ queryExecutionAPI = new QueryExecutionAPI(apiClient);
queryHistoryAPI = new QueryHistoryAPI(apiClient);
queryVisualizationsAPI = new QueryVisualizationsAPI(apiClient);
queryVisualizationsLegacyAPI = new QueryVisualizationsLegacyAPI(apiClient);
@@ -427,6 +437,7 @@ public WorkspaceClient(DatabricksConfig config) {
redashConfigAPI = new RedashConfigAPI(apiClient);
registeredModelsAPI = new RegisteredModelsAPI(apiClient);
reposAPI = new ReposAPI(apiClient);
+ requestForAccessAPI = new RequestForAccessAPI(apiClient);
resourceQuotasAPI = new ResourceQuotasAPI(apiClient);
schemasAPI = new SchemasAPI(apiClient);
secretsAPI = new SecretsExt(apiClient);
@@ -453,6 +464,7 @@ public WorkspaceClient(DatabricksConfig config) {
workspaceAPI = new WorkspaceAPI(apiClient);
workspaceBindingsAPI = new WorkspaceBindingsAPI(apiClient);
workspaceConfAPI = new WorkspaceConfAPI(apiClient);
+ workspaceSettingsV2API = new WorkspaceSettingsV2API(apiClient);
forecastingAPI = new ForecastingAPI(apiClient);
}
@@ -1329,6 +1341,11 @@ public QueriesLegacyAPI queriesLegacy() {
return queriesLegacyAPI;
}
+ /** Query execution APIs for AI / BI Dashboards */
+ public QueryExecutionAPI queryExecution() {
+ return queryExecutionAPI;
+ }
+
/**
* A service responsible for storing and retrieving the list of queries run against SQL endpoints
* and serverless compute.
@@ -1471,6 +1488,18 @@ public ReposAPI repos() {
return reposAPI;
}
+ /**
+ * Request for Access enables customers to request access to and manage access request
+ * destinations for Unity Catalog securables.
+ *
+   * <p>These APIs provide a standardized way to update, get, and request to access request
+ * destinations. Fine-grained authorization ensures that only users with appropriate permissions
+ * can manage access request destinations.
+ */
+ public RequestForAccessAPI requestForAccess() {
+ return requestForAccessAPI;
+ }
+
/**
* Unity Catalog enforces resource quotas on all securable objects, which limits the number of
* resources that can be created. Quotas are expressed in terms of a resource type and a parent
@@ -1864,6 +1893,11 @@ public WorkspaceConfAPI workspaceConf() {
return workspaceConfAPI;
}
+ /** APIs to manage workspace level settings */
+ public WorkspaceSettingsV2API workspaceSettingsV2() {
+ return workspaceSettingsV2API;
+ }
+
/** The Forecasting API allows you to create and get serverless forecasting experiments */
public ForecastingAPI forecasting() {
return forecastingAPI;
@@ -2703,6 +2737,17 @@ public WorkspaceClient withQueriesLegacyAPI(QueriesLegacyAPI queriesLegacy) {
return this;
}
+ /** Replace the default QueryExecutionService with a custom implementation. */
+ public WorkspaceClient withQueryExecutionImpl(QueryExecutionService queryExecution) {
+ return this.withQueryExecutionAPI(new QueryExecutionAPI(queryExecution));
+ }
+
+ /** Replace the default QueryExecutionAPI with a custom implementation. */
+ public WorkspaceClient withQueryExecutionAPI(QueryExecutionAPI queryExecution) {
+ this.queryExecutionAPI = queryExecution;
+ return this;
+ }
+
/** Replace the default QueryHistoryService with a custom implementation. */
public WorkspaceClient withQueryHistoryImpl(QueryHistoryService queryHistory) {
return this.withQueryHistoryAPI(new QueryHistoryAPI(queryHistory));
@@ -2810,6 +2855,17 @@ public WorkspaceClient withReposAPI(ReposAPI repos) {
return this;
}
+ /** Replace the default RequestForAccessService with a custom implementation. */
+ public WorkspaceClient withRequestForAccessImpl(RequestForAccessService requestForAccess) {
+ return this.withRequestForAccessAPI(new RequestForAccessAPI(requestForAccess));
+ }
+
+ /** Replace the default RequestForAccessAPI with a custom implementation. */
+ public WorkspaceClient withRequestForAccessAPI(RequestForAccessAPI requestForAccess) {
+ this.requestForAccessAPI = requestForAccess;
+ return this;
+ }
+
/** Replace the default ResourceQuotasService with a custom implementation. */
public WorkspaceClient withResourceQuotasImpl(ResourceQuotasService resourceQuotas) {
return this.withResourceQuotasAPI(new ResourceQuotasAPI(resourceQuotas));
@@ -3097,6 +3153,18 @@ public WorkspaceClient withWorkspaceConfAPI(WorkspaceConfAPI workspaceConf) {
return this;
}
+ /** Replace the default WorkspaceSettingsV2Service with a custom implementation. */
+ public WorkspaceClient withWorkspaceSettingsV2Impl(
+ WorkspaceSettingsV2Service workspaceSettingsV2) {
+ return this.withWorkspaceSettingsV2API(new WorkspaceSettingsV2API(workspaceSettingsV2));
+ }
+
+ /** Replace the default WorkspaceSettingsV2API with a custom implementation. */
+ public WorkspaceClient withWorkspaceSettingsV2API(WorkspaceSettingsV2API workspaceSettingsV2) {
+ this.workspaceSettingsV2API = workspaceSettingsV2;
+ return this;
+ }
+
/** Replace the default ForecastingService with a custom implementation. */
public WorkspaceClient withForecastingImpl(ForecastingService forecasting) {
return this.withForecastingAPI(new ForecastingAPI(forecasting));
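For the workspace-level additions, a similar sketch (again assuming default authentication) showing the three new accessors and the pattern of the new withXImpl/withXAPI overrides; no service methods are called because their signatures are not shown in this diff:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.catalog.RequestForAccessAPI;
import com.databricks.sdk.service.dashboards.QueryExecutionAPI;
import com.databricks.sdk.service.settingsv2.WorkspaceSettingsV2API;

public class NewWorkspaceAccessorsSketch {
  public static void main(String[] args) {
    WorkspaceClient workspace = new WorkspaceClient();

    // New accessors added in this diff.
    RequestForAccessAPI requestForAccess = workspace.requestForAccess();
    QueryExecutionAPI queryExecution = workspace.queryExecution();
    WorkspaceSettingsV2API workspaceSettingsV2 = workspace.workspaceSettingsV2();
    System.out.println(requestForAccess + " " + queryExecution + " " + workspaceSettingsV2);

    // Each new API also gets override hooks for tests or custom transports, e.g.:
    //   workspace.withQueryExecutionImpl(myQueryExecutionService);   // myQueryExecutionService is hypothetical
    //   workspace.withRequestForAccessAPI(myRequestForAccessAPI);    // myRequestForAccessAPI is hypothetical
  }
}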
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccessRequestDestinations.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccessRequestDestinations.java
new file mode 100755
index 000000000..79eca7689
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/AccessRequestDestinations.java
@@ -0,0 +1,79 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class AccessRequestDestinations {
+ /**
+ * Indicates whether any destinations are hidden from the caller due to a lack of permissions.
+ * This value is true if the caller does not have permission to see all destinations.
+ */
+ @JsonProperty("are_any_destinations_hidden")
+ private Boolean areAnyDestinationsHidden;
+
+ /** The access request destinations for the securable. */
+ @JsonProperty("destinations")
+  private Collection<NotificationDestination> destinations;
+
+ /** The securable for which the access request destinations are being retrieved. */
+ @JsonProperty("securable")
+ private Securable securable;
+
+ public AccessRequestDestinations setAreAnyDestinationsHidden(Boolean areAnyDestinationsHidden) {
+ this.areAnyDestinationsHidden = areAnyDestinationsHidden;
+ return this;
+ }
+
+ public Boolean getAreAnyDestinationsHidden() {
+ return areAnyDestinationsHidden;
+ }
+
+ public AccessRequestDestinations setDestinations(
+      Collection<NotificationDestination> destinations) {
+ this.destinations = destinations;
+ return this;
+ }
+
+  public Collection<NotificationDestination> getDestinations() {
+ return destinations;
+ }
+
+ public AccessRequestDestinations setSecurable(Securable securable) {
+ this.securable = securable;
+ return this;
+ }
+
+ public Securable getSecurable() {
+ return securable;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ AccessRequestDestinations that = (AccessRequestDestinations) o;
+ return Objects.equals(areAnyDestinationsHidden, that.areAnyDestinationsHidden)
+ && Objects.equals(destinations, that.destinations)
+ && Objects.equals(securable, that.securable);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(areAnyDestinationsHidden, destinations, securable);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(AccessRequestDestinations.class)
+ .add("areAnyDestinationsHidden", areAnyDestinationsHidden)
+ .add("destinations", destinations)
+ .add("securable", securable)
+ .toString();
+ }
+}
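A short sketch of how the fluent setters on this generated model compose, assuming the destinations element type is NotificationDestination and that Securable and NotificationDestination expose the usual no-argument constructors of these generated models:

import com.databricks.sdk.service.catalog.AccessRequestDestinations;
import com.databricks.sdk.service.catalog.NotificationDestination;
import com.databricks.sdk.service.catalog.Securable;
import java.util.List;

public class AccessRequestDestinationsSketch {
  public static void main(String[] args) {
    AccessRequestDestinations destinations =
        new AccessRequestDestinations()
            .setSecurable(new Securable())                            // the securable being described
            .setDestinations(List.of(new NotificationDestination()))  // where requests are routed
            .setAreAnyDestinationsHidden(false);

    // Generated equals/hashCode/toString make these models straightforward to assert on in tests.
    System.out.println(destinations);
  }
}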
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/BatchCreateAccessRequestsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/BatchCreateAccessRequestsRequest.java
new file mode 100755
index 000000000..4fe5dddd5
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/BatchCreateAccessRequestsRequest.java
@@ -0,0 +1,48 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class BatchCreateAccessRequestsRequest {
+ /**
+ * A list of individual access requests, where each request corresponds to a set of permissions
+ * being requested on a list of securables for a specified principal.
+ */
+ @JsonProperty("requests")
+  private Collection<CreateAccessRequest> requests;
+
+  public BatchCreateAccessRequestsRequest setRequests(Collection<CreateAccessRequest> requests) {
+ this.requests = requests;
+ return this;
+ }
+
+  public Collection<CreateAccessRequest> getRequests() {
+ return requests;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ BatchCreateAccessRequestsRequest that = (BatchCreateAccessRequestsRequest) o;
+ return Objects.equals(requests, that.requests);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(requests);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(BatchCreateAccessRequestsRequest.class)
+ .add("requests", requests)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/BatchCreateAccessRequestsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/BatchCreateAccessRequestsResponse.java
new file mode 100755
index 000000000..5f0d00228
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/BatchCreateAccessRequestsResponse.java
@@ -0,0 +1,46 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class BatchCreateAccessRequestsResponse {
+ /** The access request destinations for each securable object the principal requested. */
+ @JsonProperty("responses")
+  private Collection<CreateAccessRequestResponse> responses;
+
+ public BatchCreateAccessRequestsResponse setResponses(
+      Collection<CreateAccessRequestResponse> responses) {
+ this.responses = responses;
+ return this;
+ }
+
+  public Collection<CreateAccessRequestResponse> getResponses() {
+ return responses;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ BatchCreateAccessRequestsResponse that = (BatchCreateAccessRequestsResponse) o;
+ return Objects.equals(responses, that.responses);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(responses);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(BatchCreateAccessRequestsResponse.class)
+ .add("responses", responses)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateAccessRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateAccessRequest.java
new file mode 100755
index 000000000..961ed5481
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateAccessRequest.java
@@ -0,0 +1,79 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class CreateAccessRequest {
+ /**
+ * Optional. The principal this request is for. Empty `behalf_of` defaults to the requester's
+ * identity.
+ */
+ @JsonProperty("behalf_of")
+ private Principal behalfOf;
+
+ /** Optional. Comment associated with the request. */
+ @JsonProperty("comment")
+ private String comment;
+
+ /** List of securables and their corresponding requested UC privileges. */
+ @JsonProperty("securable_permissions")
+ private Collection<SecurablePermissions> securablePermissions;
+
+ public CreateAccessRequest setBehalfOf(Principal behalfOf) {
+ this.behalfOf = behalfOf;
+ return this;
+ }
+
+ public Principal getBehalfOf() {
+ return behalfOf;
+ }
+
+ public CreateAccessRequest setComment(String comment) {
+ this.comment = comment;
+ return this;
+ }
+
+ public String getComment() {
+ return comment;
+ }
+
+ public CreateAccessRequest setSecurablePermissions(
+ Collection<SecurablePermissions> securablePermissions) {
+ this.securablePermissions = securablePermissions;
+ return this;
+ }
+
+ public Collection<SecurablePermissions> getSecurablePermissions() {
+ return securablePermissions;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CreateAccessRequest that = (CreateAccessRequest) o;
+ return Objects.equals(behalfOf, that.behalfOf)
+ && Objects.equals(comment, that.comment)
+ && Objects.equals(securablePermissions, that.securablePermissions);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(behalfOf, comment, securablePermissions);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CreateAccessRequest.class)
+ .add("behalfOf", behalfOf)
+ .add("comment", comment)
+ .add("securablePermissions", securablePermissions)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateAccessRequestResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateAccessRequestResponse.java
new file mode 100755
index 000000000..636b1984d
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/CreateAccessRequestResponse.java
@@ -0,0 +1,79 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class CreateAccessRequestResponse {
+ /**
+ * Indicates whether any destinations are hidden from the caller due to a lack of permissions.
+ * This value is true if the caller does not have permission to see all destinations.
+ */
+ @JsonProperty("are_any_destinations_hidden")
+ private Boolean areAnyDestinationsHidden;
+
+ /** The principal the request was made on behalf of. */
+ @JsonProperty("behalf_of")
+ private Principal behalfOf;
+
+ /** The access request destinations for all the securables the principal requested. */
+ @JsonProperty("destinations")
+ private Collection<SecurableNotificationDestinations> destinations;
+
+ public CreateAccessRequestResponse setAreAnyDestinationsHidden(Boolean areAnyDestinationsHidden) {
+ this.areAnyDestinationsHidden = areAnyDestinationsHidden;
+ return this;
+ }
+
+ public Boolean getAreAnyDestinationsHidden() {
+ return areAnyDestinationsHidden;
+ }
+
+ public CreateAccessRequestResponse setBehalfOf(Principal behalfOf) {
+ this.behalfOf = behalfOf;
+ return this;
+ }
+
+ public Principal getBehalfOf() {
+ return behalfOf;
+ }
+
+ public CreateAccessRequestResponse setDestinations(
+ Collection<SecurableNotificationDestinations> destinations) {
+ this.destinations = destinations;
+ return this;
+ }
+
+ public Collection<SecurableNotificationDestinations> getDestinations() {
+ return destinations;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CreateAccessRequestResponse that = (CreateAccessRequestResponse) o;
+ return Objects.equals(areAnyDestinationsHidden, that.areAnyDestinationsHidden)
+ && Objects.equals(behalfOf, that.behalfOf)
+ && Objects.equals(destinations, that.destinations);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(areAnyDestinationsHidden, behalfOf, destinations);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CreateAccessRequestResponse.class)
+ .add("areAnyDestinationsHidden", areAnyDestinationsHidden)
+ .add("behalfOf", behalfOf)
+ .add("destinations", destinations)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DestinationType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DestinationType.java
new file mode 100755
index 000000000..b5dc069b9
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/DestinationType.java
@@ -0,0 +1,14 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+
+@Generated
+public enum DestinationType {
+ EMAIL,
+ GENERIC_WEBHOOK,
+ MICROSOFT_TEAMS,
+ SLACK,
+ URL,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccessRequestDestinationsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccessRequestDestinationsRequest.java
new file mode 100755
index 000000000..2d974376c
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetAccessRequestDestinationsRequest.java
@@ -0,0 +1,57 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class GetAccessRequestDestinationsRequest {
+ /** */
+ @JsonIgnore private String fullName;
+
+ /** */
+ @JsonIgnore private String securableType;
+
+ public GetAccessRequestDestinationsRequest setFullName(String fullName) {
+ this.fullName = fullName;
+ return this;
+ }
+
+ public String getFullName() {
+ return fullName;
+ }
+
+ public GetAccessRequestDestinationsRequest setSecurableType(String securableType) {
+ this.securableType = securableType;
+ return this;
+ }
+
+ public String getSecurableType() {
+ return securableType;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetAccessRequestDestinationsRequest that = (GetAccessRequestDestinationsRequest) o;
+ return Objects.equals(fullName, that.fullName)
+ && Objects.equals(securableType, that.securableType);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(fullName, securableType);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetAccessRequestDestinationsRequest.class)
+ .add("fullName", fullName)
+ .add("securableType", securableType)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetGrantRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetGrantRequest.java
index 59e2565c2..4e83c75d1 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetGrantRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/GetGrantRequest.java
@@ -13,6 +13,11 @@ public class GetGrantRequest {
/** Full name of securable. */
@JsonIgnore private String fullName;
+ /** Optional. If true, also return privilege assignments whose principals have been deleted. */
+ @JsonIgnore
+ @QueryParam("include_deleted_principals")
+ private Boolean includeDeletedPrincipals;
+
/**
* Specifies the maximum number of privileges to return (page length). Every PrivilegeAssignment
* present in a single page response is guaranteed to contain all the privileges granted on the
@@ -50,6 +55,15 @@ public String getFullName() {
return fullName;
}
+ public GetGrantRequest setIncludeDeletedPrincipals(Boolean includeDeletedPrincipals) {
+ this.includeDeletedPrincipals = includeDeletedPrincipals;
+ return this;
+ }
+
+ public Boolean getIncludeDeletedPrincipals() {
+ return includeDeletedPrincipals;
+ }
+
public GetGrantRequest setMaxResults(Long maxResults) {
this.maxResults = maxResults;
return this;
@@ -92,6 +106,7 @@ public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) return false;
GetGrantRequest that = (GetGrantRequest) o;
return Objects.equals(fullName, that.fullName)
+ && Objects.equals(includeDeletedPrincipals, that.includeDeletedPrincipals)
&& Objects.equals(maxResults, that.maxResults)
&& Objects.equals(pageToken, that.pageToken)
&& Objects.equals(principal, that.principal)
@@ -100,13 +115,15 @@ public boolean equals(Object o) {
@Override
public int hashCode() {
- return Objects.hash(fullName, maxResults, pageToken, principal, securableType);
+ return Objects.hash(
+ fullName, includeDeletedPrincipals, maxResults, pageToken, principal, securableType);
}
@Override
public String toString() {
return new ToStringer(GetGrantRequest.class)
.add("fullName", fullName)
+ .add("includeDeletedPrincipals", includeDeletedPrincipals)
.add("maxResults", maxResults)
.add("pageToken", pageToken)
.add("principal", principal)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/NotificationDestination.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/NotificationDestination.java
new file mode 100755
index 000000000..fc0ffa38b
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/NotificationDestination.java
@@ -0,0 +1,77 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class NotificationDestination {
+ /** The unique identifier for the destination. */
+ @JsonProperty("destination_id")
+ private String destinationId;
+
+ /** The type of the destination. */
+ @JsonProperty("destination_type")
+ private DestinationType destinationType;
+
+ /**
+ * This field is used to denote whether the destination is the email of the owner of the securable
+ * object.
+ */
+ @JsonProperty("special_destination")
+ private SpecialDestination specialDestination;
+
+ public NotificationDestination setDestinationId(String destinationId) {
+ this.destinationId = destinationId;
+ return this;
+ }
+
+ public String getDestinationId() {
+ return destinationId;
+ }
+
+ public NotificationDestination setDestinationType(DestinationType destinationType) {
+ this.destinationType = destinationType;
+ return this;
+ }
+
+ public DestinationType getDestinationType() {
+ return destinationType;
+ }
+
+ public NotificationDestination setSpecialDestination(SpecialDestination specialDestination) {
+ this.specialDestination = specialDestination;
+ return this;
+ }
+
+ public SpecialDestination getSpecialDestination() {
+ return specialDestination;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ NotificationDestination that = (NotificationDestination) o;
+ return Objects.equals(destinationId, that.destinationId)
+ && Objects.equals(destinationType, that.destinationType)
+ && Objects.equals(specialDestination, that.specialDestination);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(destinationId, destinationType, specialDestination);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(NotificationDestination.class)
+ .add("destinationId", destinationId)
+ .add("destinationType", destinationType)
+ .add("specialDestination", specialDestination)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PermissionsChange.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PermissionsChange.java
index cbd425817..9860e1026 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PermissionsChange.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PermissionsChange.java
@@ -21,6 +21,18 @@ public class PermissionsChange {
@JsonProperty("principal")
private String principal;
+ /**
+ * An opaque internal ID that identifies the principal whose privileges should be removed.
+ *
+ * <p>This field is intended for removing privileges associated with a deleted user. When set,
+ * only the entries specified in the remove field are processed; any entries in the add field will
+ * be rejected.
+ *
+ * <p>Only one of principal or principal_id should be specified, never both at the same time.
+ */
+ @JsonProperty("principal_id")
+ private Long principalId;
+
/** The set of privileges to remove. */
@JsonProperty("remove")
private Collection remove;
@@ -43,6 +55,15 @@ public String getPrincipal() {
return principal;
}
+ public PermissionsChange setPrincipalId(Long principalId) {
+ this.principalId = principalId;
+ return this;
+ }
+
+ public Long getPrincipalId() {
+ return principalId;
+ }
+
public PermissionsChange setRemove(Collection remove) {
this.remove = remove;
return this;
@@ -59,12 +80,13 @@ public boolean equals(Object o) {
PermissionsChange that = (PermissionsChange) o;
return Objects.equals(add, that.add)
&& Objects.equals(principal, that.principal)
+ && Objects.equals(principalId, that.principalId)
&& Objects.equals(remove, that.remove);
}
@Override
public int hashCode() {
- return Objects.hash(add, principal, remove);
+ return Objects.hash(add, principal, principalId, remove);
}
@Override
@@ -72,6 +94,7 @@ public String toString() {
return new ToStringer(PermissionsChange.class)
.add("add", add)
.add("principal", principal)
+ .add("principalId", principalId)
.add("remove", remove)
.toString();
}
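
Since `principal_id` only accepts removals, a cleanup sketch looks roughly like the following. It assumes `Privilege` is the existing catalog privilege enum and that the numeric ID came from a grants listing made with `include_deleted_principals`; both the ID and the privilege value are placeholders.

```java
import com.databricks.sdk.service.catalog.PermissionsChange;
import com.databricks.sdk.service.catalog.Privilege;
import java.util.List;

public class RevokeDeletedPrincipalGrant {
  public static void main(String[] args) {
    // Only "remove" entries are honored when principal_id is set; "add" entries
    // would be rejected, and principal must be left unset.
    PermissionsChange change =
        new PermissionsChange()
            .setPrincipalId(1234567890L)            // opaque internal ID (placeholder)
            .setRemove(List.of(Privilege.SELECT));  // assumed privilege value
    System.out.println(change);
  }
}
```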
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Principal.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Principal.java
new file mode 100755
index 000000000..55ed154c7
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Principal.java
@@ -0,0 +1,58 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class Principal {
+ /** Databricks user, group or service principal ID. */
+ @JsonProperty("id")
+ private String id;
+
+ /** */
+ @JsonProperty("principal_type")
+ private PrincipalType principalType;
+
+ public Principal setId(String id) {
+ this.id = id;
+ return this;
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ public Principal setPrincipalType(PrincipalType principalType) {
+ this.principalType = principalType;
+ return this;
+ }
+
+ public PrincipalType getPrincipalType() {
+ return principalType;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ Principal that = (Principal) o;
+ return Objects.equals(id, that.id) && Objects.equals(principalType, that.principalType);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(id, principalType);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(Principal.class)
+ .add("id", id)
+ .add("principalType", principalType)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PrincipalType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PrincipalType.java
new file mode 100755
index 000000000..03cdcadca
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PrincipalType.java
@@ -0,0 +1,12 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+
+@Generated
+public enum PrincipalType {
+ GROUP_PRINCIPAL,
+ SERVICE_PRINCIPAL,
+ USER_PRINCIPAL,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PrivilegeAssignment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PrivilegeAssignment.java
index 3781e98f5..4dd3f9910 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PrivilegeAssignment.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/PrivilegeAssignment.java
@@ -17,6 +17,13 @@ public class PrivilegeAssignment {
@JsonProperty("principal")
private String principal;
+ /**
+ * Unique identifier of the principal. For active principals, both `principal` and `principal_id`
+ * are present.
+ */
+ @JsonProperty("principal_id")
+ private Long principalId;
+
/** The privileges assigned to the principal. */
@JsonProperty("privileges")
private Collection privileges;
@@ -30,6 +37,15 @@ public String getPrincipal() {
return principal;
}
+ public PrivilegeAssignment setPrincipalId(Long principalId) {
+ this.principalId = principalId;
+ return this;
+ }
+
+ public Long getPrincipalId() {
+ return principalId;
+ }
+
public PrivilegeAssignment setPrivileges(Collection privileges) {
this.privileges = privileges;
return this;
@@ -44,18 +60,21 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
PrivilegeAssignment that = (PrivilegeAssignment) o;
- return Objects.equals(principal, that.principal) && Objects.equals(privileges, that.privileges);
+ return Objects.equals(principal, that.principal)
+ && Objects.equals(principalId, that.principalId)
+ && Objects.equals(privileges, that.privileges);
}
@Override
public int hashCode() {
- return Objects.hash(principal, privileges);
+ return Objects.hash(principal, principalId, privileges);
}
@Override
public String toString() {
return new ToStringer(PrivilegeAssignment.class)
.add("principal", principal)
+ .add("principalId", principalId)
.add("privileges", privileges)
.toString();
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RequestForAccessAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RequestForAccessAPI.java
new file mode 100755
index 000000000..9335b9c73
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RequestForAccessAPI.java
@@ -0,0 +1,82 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.support.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Request for Access enables customers to request access to and manage access request destinations
+ * for Unity Catalog securables.
+ *
+ * <p>These APIs provide a standardized way to update, get, and request to access request
+ * destinations. Fine-grained authorization ensures that only users with appropriate permissions can
+ * manage access request destinations.
+ */
+@Generated
+public class RequestForAccessAPI {
+ private static final Logger LOG = LoggerFactory.getLogger(RequestForAccessAPI.class);
+
+ private final RequestForAccessService impl;
+
+ /** Regular-use constructor */
+ public RequestForAccessAPI(ApiClient apiClient) {
+ impl = new RequestForAccessImpl(apiClient);
+ }
+
+ /** Constructor for mocks */
+ public RequestForAccessAPI(RequestForAccessService mock) {
+ impl = mock;
+ }
+
+ /**
+ * Creates an access request for Unity Catalog permissions for a specified principal on a
+ * securable object. This Batch API can take in multiple principals, securable objects, and
+ * permissions as the input and returns the access request destinations for each.
+ */
+ public BatchCreateAccessRequestsResponse batchCreateAccessRequests(
+ BatchCreateAccessRequestsRequest request) {
+ return impl.batchCreateAccessRequests(request);
+ }
+
+ public AccessRequestDestinations getAccessRequestDestinations(
+ String securableType, String fullName) {
+ return getAccessRequestDestinations(
+ new GetAccessRequestDestinationsRequest()
+ .setSecurableType(securableType)
+ .setFullName(fullName));
+ }
+
+ /**
+ * Gets an array of access request destinations for the specified securable. Any caller can see
+ * URL destinations or the destinations on the metastore. Otherwise, only those with **BROWSE**
+ * permissions on the securable can see destinations.
+ */
+ public AccessRequestDestinations getAccessRequestDestinations(
+ GetAccessRequestDestinationsRequest request) {
+ return impl.getAccessRequestDestinations(request);
+ }
+
+ public AccessRequestDestinations updateAccessRequestDestinations(
+ AccessRequestDestinations accessRequestDestinations, String updateMask) {
+ return updateAccessRequestDestinations(
+ new UpdateAccessRequestDestinationsRequest()
+ .setAccessRequestDestinations(accessRequestDestinations)
+ .setUpdateMask(updateMask));
+ }
+
+ /**
+ * Updates the access request destinations for the given securable. The caller must be a metastore
+ * admin, the owner of the securable, or a user that has the **MANAGE** privilege on the securable
+ * in order to assign destinations.
+ */
+ public AccessRequestDestinations updateAccessRequestDestinations(
+ UpdateAccessRequestDestinationsRequest request) {
+ return impl.updateAccessRequestDestinations(request);
+ }
+
+ public RequestForAccessService impl() {
+ return impl;
+ }
+}
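
Taken together, the classes above support a request-for-access flow roughly like the sketch below. It builds the API wrapper directly from an `ApiClient` via the constructor shown above (the workspace-level accessor that presumably accompanies this change is not part of this excerpt). The securable names, the principal ID, the no-argument `ApiClient` construction, and the use of `Privilege.SELECT` as the requested permission are all illustrative assumptions.

```java
import com.databricks.sdk.core.ApiClient;
import com.databricks.sdk.service.catalog.*;
import java.util.List;

public class RequestAccessExample {
  public static void main(String[] args) {
    ApiClient apiClient = new ApiClient(); // assumes default config resolution
    RequestForAccessAPI rfa = new RequestForAccessAPI(apiClient);

    // 1. See where access requests for a table are routed (BROWSE may be required).
    AccessRequestDestinations destinations =
        rfa.getAccessRequestDestinations("table", "main.sales.orders");
    System.out.println(destinations);

    // 2. File an access request for SELECT on that table on behalf of a user.
    BatchCreateAccessRequestsResponse created =
        rfa.batchCreateAccessRequests(
            new BatchCreateAccessRequestsRequest()
                .setRequests(
                    List.of(
                        new CreateAccessRequest()
                            .setBehalfOf(
                                new Principal()
                                    .setId("7654321") // placeholder principal ID
                                    .setPrincipalType(PrincipalType.USER_PRINCIPAL))
                            .setComment("Read access for quarterly reporting")
                            .setSecurablePermissions(
                                List.of(
                                    new SecurablePermissions()
                                        .setSecurable(
                                            new Securable()
                                                .setType(SecurableType.TABLE)
                                                .setFullName("main.sales.orders"))
                                        .setPermission(List.of(Privilege.SELECT)))))));
    System.out.println(created);
  }
}
```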
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RequestForAccessImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RequestForAccessImpl.java
new file mode 100755
index 000000000..82a1bbd97
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RequestForAccessImpl.java
@@ -0,0 +1,65 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
+import com.databricks.sdk.support.Generated;
+import java.io.IOException;
+
+/** Package-local implementation of RequestForAccess */
+@Generated
+class RequestForAccessImpl implements RequestForAccessService {
+ private final ApiClient apiClient;
+
+ public RequestForAccessImpl(ApiClient apiClient) {
+ this.apiClient = apiClient;
+ }
+
+ @Override
+ public BatchCreateAccessRequestsResponse batchCreateAccessRequests(
+ BatchCreateAccessRequestsRequest request) {
+ String path = "/api/3.0/rfa/requests";
+ try {
+ Request req = new Request("POST", path, apiClient.serialize(request));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, BatchCreateAccessRequestsResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public AccessRequestDestinations getAccessRequestDestinations(
+ GetAccessRequestDestinationsRequest request) {
+ String path =
+ String.format(
+ "/api/3.0/rfa/destinations/%s/%s", request.getSecurableType(), request.getFullName());
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, AccessRequestDestinations.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public AccessRequestDestinations updateAccessRequestDestinations(
+ UpdateAccessRequestDestinationsRequest request) {
+ String path = "/api/3.0/rfa/destinations";
+ try {
+ Request req =
+ new Request("PATCH", path, apiClient.serialize(request.getAccessRequestDestinations()));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, AccessRequestDestinations.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RequestForAccessService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RequestForAccessService.java
new file mode 100755
index 000000000..b4eb77a17
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/RequestForAccessService.java
@@ -0,0 +1,43 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+
+/**
+ * Request for Access enables customers to request access to and manage access request destinations
+ * for Unity Catalog securables.
+ *
+ *
+ * <p>These APIs provide a standardized way to update, get, and request to access request
+ * destinations. Fine-grained authorization ensures that only users with appropriate permissions can
+ * manage access request destinations.
+ *
+ * <p>This is the high-level interface, that contains generated methods.
+ *
+ * <p>Evolving: this interface is under development. Method signatures may change.
+ */
+@Generated
+public interface RequestForAccessService {
+ /**
+ * Creates an access request for Unity Catalog permissions for a specified principal on a
+ * securable object. This Batch API can take in multiple principals, securable objects, and
+ * permissions as the input and returns the access request destinations for each.
+ */
+ BatchCreateAccessRequestsResponse batchCreateAccessRequests(
+ BatchCreateAccessRequestsRequest batchCreateAccessRequestsRequest);
+
+ /**
+ * Gets an array of access request destinations for the specified securable. Any caller can see
+ * URL destinations or the destinations on the metastore. Otherwise, only those with **BROWSE**
+ * permissions on the securable can see destinations.
+ */
+ AccessRequestDestinations getAccessRequestDestinations(
+ GetAccessRequestDestinationsRequest getAccessRequestDestinationsRequest);
+
+ /**
+ * Updates the access request destinations for the given securable. The caller must be a metastore
+ * admin, the owner of the securable, or a user that has the **MANAGE** privilege on the securable
+ * in order to assign destinations.
+ */
+ AccessRequestDestinations updateAccessRequestDestinations(
+ UpdateAccessRequestDestinationsRequest updateAccessRequestDestinationsRequest);
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Securable.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Securable.java
new file mode 100755
index 000000000..869984ba5
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/Securable.java
@@ -0,0 +1,83 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/**
+ * Generic definition of a securable, which is uniquely defined in a metastore by its type and full
+ * name.
+ */
+@Generated
+public class Securable {
+ /** Required. The full name of the catalog/schema/table. Optional if resource_name is present. */
+ @JsonProperty("full_name")
+ private String fullName;
+
+ /**
+ * Optional. The name of the Share object that contains the securable when the securable is
+ * getting shared in D2D Delta Sharing.
+ */
+ @JsonProperty("provider_share")
+ private String providerShare;
+
+ /**
+ * Required. The type of securable (catalog/schema/table). Optional if resource_name is present.
+ */
+ @JsonProperty("type")
+ private SecurableType typeValue;
+
+ public Securable setFullName(String fullName) {
+ this.fullName = fullName;
+ return this;
+ }
+
+ public String getFullName() {
+ return fullName;
+ }
+
+ public Securable setProviderShare(String providerShare) {
+ this.providerShare = providerShare;
+ return this;
+ }
+
+ public String getProviderShare() {
+ return providerShare;
+ }
+
+ public Securable setType(SecurableType typeValue) {
+ this.typeValue = typeValue;
+ return this;
+ }
+
+ public SecurableType getType() {
+ return typeValue;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ Securable that = (Securable) o;
+ return Objects.equals(fullName, that.fullName)
+ && Objects.equals(providerShare, that.providerShare)
+ && Objects.equals(typeValue, that.typeValue);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(fullName, providerShare, typeValue);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(Securable.class)
+ .add("fullName", fullName)
+ .add("providerShare", providerShare)
+ .add("typeValue", typeValue)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableNotificationDestinations.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableNotificationDestinations.java
new file mode 100755
index 000000000..90580bd32
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurableNotificationDestinations.java
@@ -0,0 +1,60 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class SecurableNotificationDestinations {
+ /** The access request destinations for the securable. */
+ @JsonProperty("notification_destinations")
+ private NotificationDestination notificationDestinations;
+
+ /** The securable for which the access request destinations are being retrieved. */
+ @JsonProperty("securable")
+ private Securable securable;
+
+ public SecurableNotificationDestinations setNotificationDestinations(
+ NotificationDestination notificationDestinations) {
+ this.notificationDestinations = notificationDestinations;
+ return this;
+ }
+
+ public NotificationDestination getNotificationDestinations() {
+ return notificationDestinations;
+ }
+
+ public SecurableNotificationDestinations setSecurable(Securable securable) {
+ this.securable = securable;
+ return this;
+ }
+
+ public Securable getSecurable() {
+ return securable;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ SecurableNotificationDestinations that = (SecurableNotificationDestinations) o;
+ return Objects.equals(notificationDestinations, that.notificationDestinations)
+ && Objects.equals(securable, that.securable);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(notificationDestinations, securable);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(SecurableNotificationDestinations.class)
+ .add("notificationDestinations", notificationDestinations)
+ .add("securable", securable)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurablePermissions.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurablePermissions.java
new file mode 100755
index 000000000..4da6b5e22
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SecurablePermissions.java
@@ -0,0 +1,59 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class SecurablePermissions {
+ /** List of requested Unity Catalog permissions. */
+ @JsonProperty("permission")
+ private Collection<Privilege> permission;
+
+ /** The securable for which the access request destinations are being requested. */
+ @JsonProperty("securable")
+ private Securable securable;
+
+ public SecurablePermissions setPermission(Collection<Privilege> permission) {
+ this.permission = permission;
+ return this;
+ }
+
+ public Collection<Privilege> getPermission() {
+ return permission;
+ }
+
+ public SecurablePermissions setSecurable(Securable securable) {
+ this.securable = securable;
+ return this;
+ }
+
+ public Securable getSecurable() {
+ return securable;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ SecurablePermissions that = (SecurablePermissions) o;
+ return Objects.equals(permission, that.permission) && Objects.equals(securable, that.securable);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(permission, securable);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(SecurablePermissions.class)
+ .add("permission", permission)
+ .add("securable", securable)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SpecialDestination.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SpecialDestination.java
new file mode 100755
index 000000000..105d24844
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/SpecialDestination.java
@@ -0,0 +1,14 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+
+@Generated
+public enum SpecialDestination {
+ SPECIAL_DESTINATION_CATALOG_OWNER,
+ SPECIAL_DESTINATION_CONNECTION_OWNER,
+ SPECIAL_DESTINATION_CREDENTIAL_OWNER,
+ SPECIAL_DESTINATION_EXTERNAL_LOCATION_OWNER,
+ SPECIAL_DESTINATION_METASTORE_OWNER,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateAccessRequestDestinationsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateAccessRequestDestinationsRequest.java
new file mode 100755
index 000000000..39fd62b4a
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/catalog/UpdateAccessRequestDestinationsRequest.java
@@ -0,0 +1,77 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.catalog;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class UpdateAccessRequestDestinationsRequest {
+ /**
+ * For each destination, if **special_destination** is defined, then a **destination_id** is not
+ * required. Furthermore, the **destination_type** of a **special_destination** is always
+ * **EMAIL**. Otherwise, a **destination_id** and **destination_type** must be defined.
+ */
+ @JsonProperty("access_request_destinations")
+ private AccessRequestDestinations accessRequestDestinations;
+
+ /**
+ * The field mask must be a single string, with multiple fields separated by commas (no spaces).
+ * The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+ * (e.g., `author.given_name`). Specification of elements in sequence or map fields is not
+ * allowed, as only the entire collection field can be specified. Field names must exactly match
+ * the resource field names.
+ *
+ * <p>A field mask of `*` indicates full replacement. It’s recommended to always explicitly list
+ * the fields being updated and avoid using `*` wildcards, as it can lead to unintended results if
+ * the API changes in the future.
+ */
+ @JsonIgnore
+ @QueryParam("update_mask")
+ private String updateMask;
+
+ public UpdateAccessRequestDestinationsRequest setAccessRequestDestinations(
+ AccessRequestDestinations accessRequestDestinations) {
+ this.accessRequestDestinations = accessRequestDestinations;
+ return this;
+ }
+
+ public AccessRequestDestinations getAccessRequestDestinations() {
+ return accessRequestDestinations;
+ }
+
+ public UpdateAccessRequestDestinationsRequest setUpdateMask(String updateMask) {
+ this.updateMask = updateMask;
+ return this;
+ }
+
+ public String getUpdateMask() {
+ return updateMask;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdateAccessRequestDestinationsRequest that = (UpdateAccessRequestDestinationsRequest) o;
+ return Objects.equals(accessRequestDestinations, that.accessRequestDestinations)
+ && Objects.equals(updateMask, that.updateMask);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(accessRequestDestinations, updateMask);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateAccessRequestDestinationsRequest.class)
+ .add("accessRequestDestinations", accessRequestDestinations)
+ .add("updateMask", updateMask)
+ .toString();
+ }
+}
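
A sketch of a partial update using the field mask described above. Treating `destinations` as the field path and reusing a previously fetched `AccessRequestDestinations` object are assumptions; that class's own setters are not part of this excerpt.

```java
import com.databricks.sdk.service.catalog.AccessRequestDestinations;
import com.databricks.sdk.service.catalog.UpdateAccessRequestDestinationsRequest;

public class UpdateDestinationsExample {
  static UpdateAccessRequestDestinationsRequest buildUpdate(AccessRequestDestinations edited) {
    // "edited" is assumed to be an object fetched via getAccessRequestDestinations
    // and modified locally; only the fields named in the mask are replaced.
    return new UpdateAccessRequestDestinationsRequest()
        .setAccessRequestDestinations(edited)
        .setUpdateMask("destinations"); // comma-separated field paths, no spaces
  }
}
```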
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateDefaultBaseEnvironmentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateDefaultBaseEnvironmentRequest.java
new file mode 100755
index 000000000..598f95361
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateDefaultBaseEnvironmentRequest.java
@@ -0,0 +1,63 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.compute;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class CreateDefaultBaseEnvironmentRequest {
+ /** */
+ @JsonProperty("default_base_environment")
+ private DefaultBaseEnvironment defaultBaseEnvironment;
+
+ /**
+ * A unique identifier for this request. A random UUID is recommended. This request is only
+ * idempotent if a `request_id` is provided.
+ */
+ @JsonProperty("request_id")
+ private String requestId;
+
+ public CreateDefaultBaseEnvironmentRequest setDefaultBaseEnvironment(
+ DefaultBaseEnvironment defaultBaseEnvironment) {
+ this.defaultBaseEnvironment = defaultBaseEnvironment;
+ return this;
+ }
+
+ public DefaultBaseEnvironment getDefaultBaseEnvironment() {
+ return defaultBaseEnvironment;
+ }
+
+ public CreateDefaultBaseEnvironmentRequest setRequestId(String requestId) {
+ this.requestId = requestId;
+ return this;
+ }
+
+ public String getRequestId() {
+ return requestId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CreateDefaultBaseEnvironmentRequest that = (CreateDefaultBaseEnvironmentRequest) o;
+ return Objects.equals(defaultBaseEnvironment, that.defaultBaseEnvironment)
+ && Objects.equals(requestId, that.requestId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(defaultBaseEnvironment, requestId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CreateDefaultBaseEnvironmentRequest.class)
+ .add("defaultBaseEnvironment", defaultBaseEnvironment)
+ .add("requestId", requestId)
+ .toString();
+ }
+}
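
Because the create is only idempotent when `request_id` is supplied, a retry-safe sketch generates the UUID once and reuses it across attempts. The environment name is a placeholder and only setters visible in this diff are used.

```java
import com.databricks.sdk.service.compute.CreateDefaultBaseEnvironmentRequest;
import com.databricks.sdk.service.compute.DefaultBaseEnvironment;
import java.util.UUID;

public class CreateBaseEnvironmentExample {
  public static void main(String[] args) {
    String requestId = UUID.randomUUID().toString(); // generate once, reuse on retries
    CreateDefaultBaseEnvironmentRequest create =
        new CreateDefaultBaseEnvironmentRequest()
            .setRequestId(requestId)
            .setDefaultBaseEnvironment(
                new DefaultBaseEnvironment()
                    .setName("team-analytics-base") // placeholder name
                    .setIsDefault(true));
    System.out.println(create);
  }
}
```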
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateInstancePool.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateInstancePool.java
index cdc49aa18..828e11a2d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateInstancePool.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/CreateInstancePool.java
@@ -38,6 +38,13 @@ public class CreateInstancePool {
@JsonProperty("disk_spec")
private DiskSpec diskSpec;
+ /**
+ * For pools with node type flexibility (Fleet-V2), whether auto generated alternate node type ids
+ * are enabled. This field should not be true if node_type_flexibility is set.
+ */
+ @JsonProperty("enable_auto_alternate_node_types")
+ private Boolean enableAutoAlternateNodeTypes;
+
/**
* Autoscaling Local Storage: when enabled, this instances in this pool will dynamically acquire
* additional disk space when its Spark workers are running low on disk space. In AWS, this
@@ -83,6 +90,14 @@ public class CreateInstancePool {
@JsonProperty("min_idle_instances")
private Long minIdleInstances;
+ /**
+ * For pools with node type flexibility (Fleet-V2), this object contains the information about the
+ * alternate node type ids to use when attempting to launch a cluster if the node type id is not
+ * available. This field should not be set if enable_auto_alternate_node_types is true.
+ */
+ @JsonProperty("node_type_flexibility")
+ private NodeTypeFlexibility nodeTypeFlexibility;
+
/**
* This field encodes, through a single value, the resources available to each of the Spark nodes
* in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or
@@ -154,6 +169,15 @@ public DiskSpec getDiskSpec() {
return diskSpec;
}
+ public CreateInstancePool setEnableAutoAlternateNodeTypes(Boolean enableAutoAlternateNodeTypes) {
+ this.enableAutoAlternateNodeTypes = enableAutoAlternateNodeTypes;
+ return this;
+ }
+
+ public Boolean getEnableAutoAlternateNodeTypes() {
+ return enableAutoAlternateNodeTypes;
+ }
+
public CreateInstancePool setEnableElasticDisk(Boolean enableElasticDisk) {
this.enableElasticDisk = enableElasticDisk;
return this;
@@ -209,6 +233,15 @@ public Long getMinIdleInstances() {
return minIdleInstances;
}
+ public CreateInstancePool setNodeTypeFlexibility(NodeTypeFlexibility nodeTypeFlexibility) {
+ this.nodeTypeFlexibility = nodeTypeFlexibility;
+ return this;
+ }
+
+ public NodeTypeFlexibility getNodeTypeFlexibility() {
+ return nodeTypeFlexibility;
+ }
+
public CreateInstancePool setNodeTypeId(String nodeTypeId) {
this.nodeTypeId = nodeTypeId;
return this;
@@ -264,6 +297,7 @@ public boolean equals(Object o) {
&& Objects.equals(azureAttributes, that.azureAttributes)
&& Objects.equals(customTags, that.customTags)
&& Objects.equals(diskSpec, that.diskSpec)
+ && Objects.equals(enableAutoAlternateNodeTypes, that.enableAutoAlternateNodeTypes)
&& Objects.equals(enableElasticDisk, that.enableElasticDisk)
&& Objects.equals(gcpAttributes, that.gcpAttributes)
&& Objects.equals(
@@ -271,6 +305,7 @@ public boolean equals(Object o) {
&& Objects.equals(instancePoolName, that.instancePoolName)
&& Objects.equals(maxCapacity, that.maxCapacity)
&& Objects.equals(minIdleInstances, that.minIdleInstances)
+ && Objects.equals(nodeTypeFlexibility, that.nodeTypeFlexibility)
&& Objects.equals(nodeTypeId, that.nodeTypeId)
&& Objects.equals(preloadedDockerImages, that.preloadedDockerImages)
&& Objects.equals(preloadedSparkVersions, that.preloadedSparkVersions)
@@ -285,12 +320,14 @@ public int hashCode() {
azureAttributes,
customTags,
diskSpec,
+ enableAutoAlternateNodeTypes,
enableElasticDisk,
gcpAttributes,
idleInstanceAutoterminationMinutes,
instancePoolName,
maxCapacity,
minIdleInstances,
+ nodeTypeFlexibility,
nodeTypeId,
preloadedDockerImages,
preloadedSparkVersions,
@@ -305,12 +342,14 @@ public String toString() {
.add("azureAttributes", azureAttributes)
.add("customTags", customTags)
.add("diskSpec", diskSpec)
+ .add("enableAutoAlternateNodeTypes", enableAutoAlternateNodeTypes)
.add("enableElasticDisk", enableElasticDisk)
.add("gcpAttributes", gcpAttributes)
.add("idleInstanceAutoterminationMinutes", idleInstanceAutoterminationMinutes)
.add("instancePoolName", instancePoolName)
.add("maxCapacity", maxCapacity)
.add("minIdleInstances", minIdleInstances)
+ .add("nodeTypeFlexibility", nodeTypeFlexibility)
.add("nodeTypeId", nodeTypeId)
.add("preloadedDockerImages", preloadedDockerImages)
.add("preloadedSparkVersions", preloadedSparkVersions)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DefaultBaseEnvironment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DefaultBaseEnvironment.java
new file mode 100755
index 000000000..db8bb6d3a
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DefaultBaseEnvironment.java
@@ -0,0 +1,242 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.compute;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class DefaultBaseEnvironment {
+ /** */
+ @JsonProperty("base_environment_cache")
+ private Collection<DefaultBaseEnvironmentCache> baseEnvironmentCache;
+
+ /** */
+ @JsonProperty("created_timestamp")
+ private Long createdTimestamp;
+
+ /** */
+ @JsonProperty("creator_user_id")
+ private Long creatorUserId;
+
+ /**
+ * Note: we made `environment` non-internal because we need to expose its `client` field. All
+ * other fields should be treated as internal.
+ */
+ @JsonProperty("environment")
+ private Environment environment;
+
+ /** */
+ @JsonProperty("filepath")
+ private String filepath;
+
+ /** */
+ @JsonProperty("id")
+ private String id;
+
+ /** */
+ @JsonProperty("is_default")
+ private Boolean isDefault;
+
+ /** */
+ @JsonProperty("last_updated_timestamp")
+ private Long lastUpdatedTimestamp;
+
+ /** */
+ @JsonProperty("last_updated_user_id")
+ private Long lastUpdatedUserId;
+
+ /** */
+ @JsonProperty("message")
+ private String message;
+
+ /** */
+ @JsonProperty("name")
+ private String name;
+
+ /** */
+ @JsonProperty("principal_ids")
+ private Collection<Long> principalIds;
+
+ /** */
+ @JsonProperty("status")
+ private DefaultBaseEnvironmentCacheStatus status;
+
+ public DefaultBaseEnvironment setBaseEnvironmentCache(
+ Collection<DefaultBaseEnvironmentCache> baseEnvironmentCache) {
+ this.baseEnvironmentCache = baseEnvironmentCache;
+ return this;
+ }
+
+ public Collection<DefaultBaseEnvironmentCache> getBaseEnvironmentCache() {
+ return baseEnvironmentCache;
+ }
+
+ public DefaultBaseEnvironment setCreatedTimestamp(Long createdTimestamp) {
+ this.createdTimestamp = createdTimestamp;
+ return this;
+ }
+
+ public Long getCreatedTimestamp() {
+ return createdTimestamp;
+ }
+
+ public DefaultBaseEnvironment setCreatorUserId(Long creatorUserId) {
+ this.creatorUserId = creatorUserId;
+ return this;
+ }
+
+ public Long getCreatorUserId() {
+ return creatorUserId;
+ }
+
+ public DefaultBaseEnvironment setEnvironment(Environment environment) {
+ this.environment = environment;
+ return this;
+ }
+
+ public Environment getEnvironment() {
+ return environment;
+ }
+
+ public DefaultBaseEnvironment setFilepath(String filepath) {
+ this.filepath = filepath;
+ return this;
+ }
+
+ public String getFilepath() {
+ return filepath;
+ }
+
+ public DefaultBaseEnvironment setId(String id) {
+ this.id = id;
+ return this;
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ public DefaultBaseEnvironment setIsDefault(Boolean isDefault) {
+ this.isDefault = isDefault;
+ return this;
+ }
+
+ public Boolean getIsDefault() {
+ return isDefault;
+ }
+
+ public DefaultBaseEnvironment setLastUpdatedTimestamp(Long lastUpdatedTimestamp) {
+ this.lastUpdatedTimestamp = lastUpdatedTimestamp;
+ return this;
+ }
+
+ public Long getLastUpdatedTimestamp() {
+ return lastUpdatedTimestamp;
+ }
+
+ public DefaultBaseEnvironment setLastUpdatedUserId(Long lastUpdatedUserId) {
+ this.lastUpdatedUserId = lastUpdatedUserId;
+ return this;
+ }
+
+ public Long getLastUpdatedUserId() {
+ return lastUpdatedUserId;
+ }
+
+ public DefaultBaseEnvironment setMessage(String message) {
+ this.message = message;
+ return this;
+ }
+
+ public String getMessage() {
+ return message;
+ }
+
+ public DefaultBaseEnvironment setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public DefaultBaseEnvironment setPrincipalIds(Collection<Long> principalIds) {
+ this.principalIds = principalIds;
+ return this;
+ }
+
+ public Collection<Long> getPrincipalIds() {
+ return principalIds;
+ }
+
+ public DefaultBaseEnvironment setStatus(DefaultBaseEnvironmentCacheStatus status) {
+ this.status = status;
+ return this;
+ }
+
+ public DefaultBaseEnvironmentCacheStatus getStatus() {
+ return status;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DefaultBaseEnvironment that = (DefaultBaseEnvironment) o;
+ return Objects.equals(baseEnvironmentCache, that.baseEnvironmentCache)
+ && Objects.equals(createdTimestamp, that.createdTimestamp)
+ && Objects.equals(creatorUserId, that.creatorUserId)
+ && Objects.equals(environment, that.environment)
+ && Objects.equals(filepath, that.filepath)
+ && Objects.equals(id, that.id)
+ && Objects.equals(isDefault, that.isDefault)
+ && Objects.equals(lastUpdatedTimestamp, that.lastUpdatedTimestamp)
+ && Objects.equals(lastUpdatedUserId, that.lastUpdatedUserId)
+ && Objects.equals(message, that.message)
+ && Objects.equals(name, that.name)
+ && Objects.equals(principalIds, that.principalIds)
+ && Objects.equals(status, that.status);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ baseEnvironmentCache,
+ createdTimestamp,
+ creatorUserId,
+ environment,
+ filepath,
+ id,
+ isDefault,
+ lastUpdatedTimestamp,
+ lastUpdatedUserId,
+ message,
+ name,
+ principalIds,
+ status);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DefaultBaseEnvironment.class)
+ .add("baseEnvironmentCache", baseEnvironmentCache)
+ .add("createdTimestamp", createdTimestamp)
+ .add("creatorUserId", creatorUserId)
+ .add("environment", environment)
+ .add("filepath", filepath)
+ .add("id", id)
+ .add("isDefault", isDefault)
+ .add("lastUpdatedTimestamp", lastUpdatedTimestamp)
+ .add("lastUpdatedUserId", lastUpdatedUserId)
+ .add("message", message)
+ .add("name", name)
+ .add("principalIds", principalIds)
+ .add("status", status)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DefaultBaseEnvironmentCache.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DefaultBaseEnvironmentCache.java
new file mode 100755
index 000000000..344da911a
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DefaultBaseEnvironmentCache.java
@@ -0,0 +1,75 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.compute;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class DefaultBaseEnvironmentCache {
+ /** */
+ @JsonProperty("materialized_environment")
+ private MaterializedEnvironment materializedEnvironment;
+
+ /** */
+ @JsonProperty("message")
+ private String message;
+
+ /** */
+ @JsonProperty("status")
+ private DefaultBaseEnvironmentCacheStatus status;
+
+ public DefaultBaseEnvironmentCache setMaterializedEnvironment(
+ MaterializedEnvironment materializedEnvironment) {
+ this.materializedEnvironment = materializedEnvironment;
+ return this;
+ }
+
+ public MaterializedEnvironment getMaterializedEnvironment() {
+ return materializedEnvironment;
+ }
+
+ public DefaultBaseEnvironmentCache setMessage(String message) {
+ this.message = message;
+ return this;
+ }
+
+ public String getMessage() {
+ return message;
+ }
+
+ public DefaultBaseEnvironmentCache setStatus(DefaultBaseEnvironmentCacheStatus status) {
+ this.status = status;
+ return this;
+ }
+
+ public DefaultBaseEnvironmentCacheStatus getStatus() {
+ return status;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DefaultBaseEnvironmentCache that = (DefaultBaseEnvironmentCache) o;
+ return Objects.equals(materializedEnvironment, that.materializedEnvironment)
+ && Objects.equals(message, that.message)
+ && Objects.equals(status, that.status);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(materializedEnvironment, message, status);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DefaultBaseEnvironmentCache.class)
+ .add("materializedEnvironment", materializedEnvironment)
+ .add("message", message)
+ .add("status", status)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DefaultBaseEnvironmentCacheStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DefaultBaseEnvironmentCacheStatus.java
new file mode 100755
index 000000000..aaee91c80
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DefaultBaseEnvironmentCacheStatus.java
@@ -0,0 +1,15 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.compute;
+
+import com.databricks.sdk.support.Generated;
+
+@Generated
+public enum DefaultBaseEnvironmentCacheStatus {
+ CREATED,
+ EXPIRED,
+ FAILED,
+ INVALID,
+ PENDING,
+ REFRESHING,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteDefaultBaseEnvironmentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteDefaultBaseEnvironmentRequest.java
new file mode 100755
index 000000000..bef81a175
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/DeleteDefaultBaseEnvironmentRequest.java
@@ -0,0 +1,41 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.compute;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class DeleteDefaultBaseEnvironmentRequest {
+ /** */
+ @JsonIgnore private String id;
+
+ public DeleteDefaultBaseEnvironmentRequest setId(String id) {
+ this.id = id;
+ return this;
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DeleteDefaultBaseEnvironmentRequest that = (DeleteDefaultBaseEnvironmentRequest) o;
+ return Objects.equals(id, that.id);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(id);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(DeleteDefaultBaseEnvironmentRequest.class).add("id", id).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditInstancePool.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditInstancePool.java
index b5cd70eb2..c2eea1d95 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditInstancePool.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/EditInstancePool.java
@@ -19,6 +19,13 @@ public class EditInstancePool {
@JsonProperty("custom_tags")
private Map customTags;
+ /**
+ * For pools with node type flexibility (Fleet-V2), whether auto generated alternate node type ids
+ * are enabled. This field should not be true if node_type_flexibility is set.
+ */
+ @JsonProperty("enable_auto_alternate_node_types")
+ private Boolean enableAutoAlternateNodeTypes;
+
/**
* Automatically terminates the extra instances in the pool cache after they are inactive for this
* time in minutes if min_idle_instances requirement is already met. If not set, the extra pool
@@ -52,6 +59,14 @@ public class EditInstancePool {
@JsonProperty("min_idle_instances")
private Long minIdleInstances;
+ /**
+ * For pools with node type flexibility (Fleet-V2), this object contains the information about the
+ * alternate node type ids to use when attempting to launch a cluster if the node type id is not
+ * available. This field should not be set if enable_auto_alternate_node_types is true.
+ */
+ @JsonProperty("node_type_flexibility")
+ private NodeTypeFlexibility nodeTypeFlexibility;
+
/**
* This field encodes, through a single value, the resources available to each of the Spark nodes
* in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or
@@ -84,6 +99,15 @@ public Map<String, String> getCustomTags() {
return customTags;
}
+ public EditInstancePool setEnableAutoAlternateNodeTypes(Boolean enableAutoAlternateNodeTypes) {
+ this.enableAutoAlternateNodeTypes = enableAutoAlternateNodeTypes;
+ return this;
+ }
+
+ public Boolean getEnableAutoAlternateNodeTypes() {
+ return enableAutoAlternateNodeTypes;
+ }
+
public EditInstancePool setIdleInstanceAutoterminationMinutes(
Long idleInstanceAutoterminationMinutes) {
this.idleInstanceAutoterminationMinutes = idleInstanceAutoterminationMinutes;
@@ -130,6 +154,15 @@ public Long getMinIdleInstances() {
return minIdleInstances;
}
+ public EditInstancePool setNodeTypeFlexibility(NodeTypeFlexibility nodeTypeFlexibility) {
+ this.nodeTypeFlexibility = nodeTypeFlexibility;
+ return this;
+ }
+
+ public NodeTypeFlexibility getNodeTypeFlexibility() {
+ return nodeTypeFlexibility;
+ }
+
public EditInstancePool setNodeTypeId(String nodeTypeId) {
this.nodeTypeId = nodeTypeId;
return this;
@@ -163,12 +196,14 @@ public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) return false;
EditInstancePool that = (EditInstancePool) o;
return Objects.equals(customTags, that.customTags)
+ && Objects.equals(enableAutoAlternateNodeTypes, that.enableAutoAlternateNodeTypes)
&& Objects.equals(
idleInstanceAutoterminationMinutes, that.idleInstanceAutoterminationMinutes)
&& Objects.equals(instancePoolId, that.instancePoolId)
&& Objects.equals(instancePoolName, that.instancePoolName)
&& Objects.equals(maxCapacity, that.maxCapacity)
&& Objects.equals(minIdleInstances, that.minIdleInstances)
+ && Objects.equals(nodeTypeFlexibility, that.nodeTypeFlexibility)
&& Objects.equals(nodeTypeId, that.nodeTypeId)
&& Objects.equals(remoteDiskThroughput, that.remoteDiskThroughput)
&& Objects.equals(totalInitialRemoteDiskSize, that.totalInitialRemoteDiskSize);
@@ -178,11 +213,13 @@ public boolean equals(Object o) {
public int hashCode() {
return Objects.hash(
customTags,
+ enableAutoAlternateNodeTypes,
idleInstanceAutoterminationMinutes,
instancePoolId,
instancePoolName,
maxCapacity,
minIdleInstances,
+ nodeTypeFlexibility,
nodeTypeId,
remoteDiskThroughput,
totalInitialRemoteDiskSize);
@@ -192,11 +229,13 @@ public int hashCode() {
public String toString() {
return new ToStringer(EditInstancePool.class)
.add("customTags", customTags)
+ .add("enableAutoAlternateNodeTypes", enableAutoAlternateNodeTypes)
.add("idleInstanceAutoterminationMinutes", idleInstanceAutoterminationMinutes)
.add("instancePoolId", instancePoolId)
.add("instancePoolName", instancePoolName)
.add("maxCapacity", maxCapacity)
.add("minIdleInstances", minIdleInstances)
+ .add("nodeTypeFlexibility", nodeTypeFlexibility)
.add("nodeTypeId", nodeTypeId)
.add("remoteDiskThroughput", remoteDiskThroughput)
.add("totalInitialRemoteDiskSize", totalInitialRemoteDiskSize)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetInstancePool.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetInstancePool.java
index be68fd165..c9dd511d8 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetInstancePool.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/GetInstancePool.java
@@ -52,6 +52,13 @@ public class GetInstancePool {
@JsonProperty("disk_spec")
private DiskSpec diskSpec;
+ /**
+ * For pools with node type flexibility (Fleet-V2), whether auto generated alternate node type ids
+ * are enabled. This field should not be true if node_type_flexibility is set.
+ */
+ @JsonProperty("enable_auto_alternate_node_types")
+ private Boolean enableAutoAlternateNodeTypes;
+
/**
* Autoscaling Local Storage: when enabled, the instances in this pool will dynamically acquire
* additional disk space when their Spark workers are running low on disk space. In AWS, this
@@ -101,6 +108,14 @@ public class GetInstancePool {
@JsonProperty("min_idle_instances")
private Long minIdleInstances;
+ /**
+ * For pools with node type flexibility (Fleet-V2), this object contains the information about the
+ * alternate node type ids to use when attempting to launch a cluster if the node type id is not
+ * available. This field should not be set if enable_auto_alternate_node_types is true.
+ */
+ @JsonProperty("node_type_flexibility")
+ private NodeTypeFlexibility nodeTypeFlexibility;
+
/**
* This field encodes, through a single value, the resources available to each of the Spark nodes
* in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or
@@ -193,6 +208,15 @@ public DiskSpec getDiskSpec() {
return diskSpec;
}
+ public GetInstancePool setEnableAutoAlternateNodeTypes(Boolean enableAutoAlternateNodeTypes) {
+ this.enableAutoAlternateNodeTypes = enableAutoAlternateNodeTypes;
+ return this;
+ }
+
+ public Boolean getEnableAutoAlternateNodeTypes() {
+ return enableAutoAlternateNodeTypes;
+ }
+
public GetInstancePool setEnableElasticDisk(Boolean enableElasticDisk) {
this.enableElasticDisk = enableElasticDisk;
return this;
@@ -257,6 +281,15 @@ public Long getMinIdleInstances() {
return minIdleInstances;
}
+ public GetInstancePool setNodeTypeFlexibility(NodeTypeFlexibility nodeTypeFlexibility) {
+ this.nodeTypeFlexibility = nodeTypeFlexibility;
+ return this;
+ }
+
+ public NodeTypeFlexibility getNodeTypeFlexibility() {
+ return nodeTypeFlexibility;
+ }
+
public GetInstancePool setNodeTypeId(String nodeTypeId) {
this.nodeTypeId = nodeTypeId;
return this;
@@ -339,6 +372,7 @@ public boolean equals(Object o) {
&& Objects.equals(customTags, that.customTags)
&& Objects.equals(defaultTags, that.defaultTags)
&& Objects.equals(diskSpec, that.diskSpec)
+ && Objects.equals(enableAutoAlternateNodeTypes, that.enableAutoAlternateNodeTypes)
&& Objects.equals(enableElasticDisk, that.enableElasticDisk)
&& Objects.equals(gcpAttributes, that.gcpAttributes)
&& Objects.equals(
@@ -347,6 +381,7 @@ public boolean equals(Object o) {
&& Objects.equals(instancePoolName, that.instancePoolName)
&& Objects.equals(maxCapacity, that.maxCapacity)
&& Objects.equals(minIdleInstances, that.minIdleInstances)
+ && Objects.equals(nodeTypeFlexibility, that.nodeTypeFlexibility)
&& Objects.equals(nodeTypeId, that.nodeTypeId)
&& Objects.equals(preloadedDockerImages, that.preloadedDockerImages)
&& Objects.equals(preloadedSparkVersions, that.preloadedSparkVersions)
@@ -365,6 +400,7 @@ public int hashCode() {
customTags,
defaultTags,
diskSpec,
+ enableAutoAlternateNodeTypes,
enableElasticDisk,
gcpAttributes,
idleInstanceAutoterminationMinutes,
@@ -372,6 +408,7 @@ public int hashCode() {
instancePoolName,
maxCapacity,
minIdleInstances,
+ nodeTypeFlexibility,
nodeTypeId,
preloadedDockerImages,
preloadedSparkVersions,
@@ -390,6 +427,7 @@ public String toString() {
.add("customTags", customTags)
.add("defaultTags", defaultTags)
.add("diskSpec", diskSpec)
+ .add("enableAutoAlternateNodeTypes", enableAutoAlternateNodeTypes)
.add("enableElasticDisk", enableElasticDisk)
.add("gcpAttributes", gcpAttributes)
.add("idleInstanceAutoterminationMinutes", idleInstanceAutoterminationMinutes)
@@ -397,6 +435,7 @@ public String toString() {
.add("instancePoolName", instancePoolName)
.add("maxCapacity", maxCapacity)
.add("minIdleInstances", minIdleInstances)
+ .add("nodeTypeFlexibility", nodeTypeFlexibility)
.add("nodeTypeId", nodeTypeId)
.add("preloadedDockerImages", preloadedDockerImages)
.add("preloadedSparkVersions", preloadedSparkVersions)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAndStats.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAndStats.java
index 485798092..9f9932894 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAndStats.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/InstancePoolAndStats.java
@@ -52,6 +52,13 @@ public class InstancePoolAndStats {
@JsonProperty("disk_spec")
private DiskSpec diskSpec;
+ /**
+ * For pools with node type flexibility (Fleet-V2), whether auto generated alternate node type ids
+ * are enabled. This field should not be true if node_type_flexibility is set.
+ */
+ @JsonProperty("enable_auto_alternate_node_types")
+ private Boolean enableAutoAlternateNodeTypes;
+
/**
* Autoscaling Local Storage: when enabled, the instances in this pool will dynamically acquire
* additional disk space when their Spark workers are running low on disk space. In AWS, this
@@ -101,6 +108,14 @@ public class InstancePoolAndStats {
@JsonProperty("min_idle_instances")
private Long minIdleInstances;
+ /**
+ * For pools with node type flexibility (Fleet-V2), this object contains the information about the
+ * alternate node type ids to use when attempting to launch a cluster if the node type id is not
+ * available. This field should not be set if enable_auto_alternate_node_types is true.
+ */
+ @JsonProperty("node_type_flexibility")
+ private NodeTypeFlexibility nodeTypeFlexibility;
+
/**
* This field encodes, through a single value, the resources available to each of the Spark nodes
* in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or
@@ -193,6 +208,16 @@ public DiskSpec getDiskSpec() {
return diskSpec;
}
+ public InstancePoolAndStats setEnableAutoAlternateNodeTypes(
+ Boolean enableAutoAlternateNodeTypes) {
+ this.enableAutoAlternateNodeTypes = enableAutoAlternateNodeTypes;
+ return this;
+ }
+
+ public Boolean getEnableAutoAlternateNodeTypes() {
+ return enableAutoAlternateNodeTypes;
+ }
+
public InstancePoolAndStats setEnableElasticDisk(Boolean enableElasticDisk) {
this.enableElasticDisk = enableElasticDisk;
return this;
@@ -257,6 +282,15 @@ public Long getMinIdleInstances() {
return minIdleInstances;
}
+ public InstancePoolAndStats setNodeTypeFlexibility(NodeTypeFlexibility nodeTypeFlexibility) {
+ this.nodeTypeFlexibility = nodeTypeFlexibility;
+ return this;
+ }
+
+ public NodeTypeFlexibility getNodeTypeFlexibility() {
+ return nodeTypeFlexibility;
+ }
+
public InstancePoolAndStats setNodeTypeId(String nodeTypeId) {
this.nodeTypeId = nodeTypeId;
return this;
@@ -340,6 +374,7 @@ public boolean equals(Object o) {
&& Objects.equals(customTags, that.customTags)
&& Objects.equals(defaultTags, that.defaultTags)
&& Objects.equals(diskSpec, that.diskSpec)
+ && Objects.equals(enableAutoAlternateNodeTypes, that.enableAutoAlternateNodeTypes)
&& Objects.equals(enableElasticDisk, that.enableElasticDisk)
&& Objects.equals(gcpAttributes, that.gcpAttributes)
&& Objects.equals(
@@ -348,6 +383,7 @@ public boolean equals(Object o) {
&& Objects.equals(instancePoolName, that.instancePoolName)
&& Objects.equals(maxCapacity, that.maxCapacity)
&& Objects.equals(minIdleInstances, that.minIdleInstances)
+ && Objects.equals(nodeTypeFlexibility, that.nodeTypeFlexibility)
&& Objects.equals(nodeTypeId, that.nodeTypeId)
&& Objects.equals(preloadedDockerImages, that.preloadedDockerImages)
&& Objects.equals(preloadedSparkVersions, that.preloadedSparkVersions)
@@ -366,6 +402,7 @@ public int hashCode() {
customTags,
defaultTags,
diskSpec,
+ enableAutoAlternateNodeTypes,
enableElasticDisk,
gcpAttributes,
idleInstanceAutoterminationMinutes,
@@ -373,6 +410,7 @@ public int hashCode() {
instancePoolName,
maxCapacity,
minIdleInstances,
+ nodeTypeFlexibility,
nodeTypeId,
preloadedDockerImages,
preloadedSparkVersions,
@@ -391,6 +429,7 @@ public String toString() {
.add("customTags", customTags)
.add("defaultTags", defaultTags)
.add("diskSpec", diskSpec)
+ .add("enableAutoAlternateNodeTypes", enableAutoAlternateNodeTypes)
.add("enableElasticDisk", enableElasticDisk)
.add("gcpAttributes", gcpAttributes)
.add("idleInstanceAutoterminationMinutes", idleInstanceAutoterminationMinutes)
@@ -398,6 +437,7 @@ public String toString() {
.add("instancePoolName", instancePoolName)
.add("maxCapacity", maxCapacity)
.add("minIdleInstances", minIdleInstances)
+ .add("nodeTypeFlexibility", nodeTypeFlexibility)
.add("nodeTypeId", nodeTypeId)
.add("preloadedDockerImages", preloadedDockerImages)
.add("preloadedSparkVersions", preloadedSparkVersions)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesAPI.java
index e1495320b..666cd05f2 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesAPI.java
@@ -69,6 +69,35 @@ public Iterable<LibraryFullStatus> clusterStatus(ClusterStatus request) {
request, impl::clusterStatus, ClusterLibraryStatuses::getLibraryStatuses, response -> null);
}
+ public DefaultBaseEnvironment createDefaultBaseEnvironment(
+ DefaultBaseEnvironment defaultBaseEnvironment) {
+ return createDefaultBaseEnvironment(
+ new CreateDefaultBaseEnvironmentRequest()
+ .setDefaultBaseEnvironment(defaultBaseEnvironment));
+ }
+
+ /**
+ * Create a default base environment within workspaces to define the environment version and a
+ * list of dependencies to be used in serverless notebooks and jobs. This process will
+ * asynchronously generate a cache to optimize dependency resolution.
+ */
+ public DefaultBaseEnvironment createDefaultBaseEnvironment(
+ CreateDefaultBaseEnvironmentRequest request) {
+ return impl.createDefaultBaseEnvironment(request);
+ }
+
+ public void deleteDefaultBaseEnvironment(String id) {
+ deleteDefaultBaseEnvironment(new DeleteDefaultBaseEnvironmentRequest().setId(id));
+ }
+
+ /**
+ * Delete the default base environment given an ID. The default base environment may be used by
+ * downstream workloads. Please ensure that the deletion is intentional.
+ */
+ public void deleteDefaultBaseEnvironment(DeleteDefaultBaseEnvironmentRequest request) {
+ impl.deleteDefaultBaseEnvironment(request);
+ }
+
public void install(String clusterId, Collection<Library> libraries) {
install(new InstallLibraries().setClusterId(clusterId).setLibraries(libraries));
}
@@ -81,6 +110,34 @@ public void install(InstallLibraries request) {
impl.install(request);
}
+ /** List default base environments defined in the workspaces for the requested user. */
+ public Iterable<DefaultBaseEnvironment> listDefaultBaseEnvironments(
+ ListDefaultBaseEnvironmentsRequest request) {
+ return new Paginator<>(
+ request,
+ impl::listDefaultBaseEnvironments,
+ ListDefaultBaseEnvironmentsResponse::getDefaultBaseEnvironments,
+ response -> {
+ String token = response.getNextPageToken();
+ if (token == null || token.isEmpty()) {
+ return null;
+ }
+ return request.setPageToken(token);
+ });
+ }
+
+ public void refreshDefaultBaseEnvironments(Collection<String> ids) {
+ refreshDefaultBaseEnvironments(new RefreshDefaultBaseEnvironmentsRequest().setIds(ids));
+ }
+
+ /**
+ * Refresh the cached default base environments for the given IDs. This process will
+ * asynchronously regenerate the caches. The existing caches remain available until they expire.
+ */
+ public void refreshDefaultBaseEnvironments(RefreshDefaultBaseEnvironmentsRequest request) {
+ impl.refreshDefaultBaseEnvironments(request);
+ }
+
public void uninstall(String clusterId, Collection<Library> libraries) {
uninstall(new UninstallLibraries().setClusterId(clusterId).setLibraries(libraries));
}
@@ -93,6 +150,19 @@ public void uninstall(UninstallLibraries request) {
impl.uninstall(request);
}
+ public DefaultBaseEnvironment updateDefaultBaseEnvironment(String id) {
+ return updateDefaultBaseEnvironment(new UpdateDefaultBaseEnvironmentRequest().setId(id));
+ }
+
+ /**
+ * Update the default base environment for the given ID. This process will asynchronously
+ * regenerate the cache. The existing cache remains available until it expires.
+ */
+ public DefaultBaseEnvironment updateDefaultBaseEnvironment(
+ UpdateDefaultBaseEnvironmentRequest request) {
+ return impl.updateDefaultBaseEnvironment(request);
+ }
+
public LibrariesService impl() {
return impl;
}
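
A hedged end-to-end sketch of the new default base environment methods on LibrariesAPI, assuming the standard WorkspaceClient accessor w.libraries(), a placeholder environment ID, and leaving the DefaultBaseEnvironment payload fields (defined elsewhere in this change) unset:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.compute.DefaultBaseEnvironment;
import com.databricks.sdk.service.compute.ListDefaultBaseEnvironmentsRequest;
import java.util.Collections;

public class DefaultBaseEnvironmentLifecycle {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();

    // Create: the dependency cache is generated asynchronously after this returns.
    DefaultBaseEnvironment created =
        w.libraries().createDefaultBaseEnvironment(new DefaultBaseEnvironment());
    System.out.println(created);

    // List: the returned Iterable pages through results via next_page_token.
    for (DefaultBaseEnvironment dbe :
        w.libraries().listDefaultBaseEnvironments(new ListDefaultBaseEnvironmentsRequest())) {
      System.out.println(dbe);
    }

    // Refresh the cache for an environment, then delete it (placeholder ID).
    w.libraries().refreshDefaultBaseEnvironments(Collections.singletonList("<dbe-id>"));
    w.libraries().deleteDefaultBaseEnvironment("<dbe-id>");
  }
}
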
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesImpl.java
index 6bb0dd63e..70efdfcbe 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesImpl.java
@@ -41,6 +41,34 @@ public ClusterLibraryStatuses clusterStatus(ClusterStatus request) {
}
}
+ @Override
+ public DefaultBaseEnvironment createDefaultBaseEnvironment(
+ CreateDefaultBaseEnvironmentRequest request) {
+ String path = "/api/2.0/default-base-environments";
+ try {
+ Request req = new Request("POST", path, apiClient.serialize(request));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, DefaultBaseEnvironment.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public void deleteDefaultBaseEnvironment(DeleteDefaultBaseEnvironmentRequest request) {
+ String path = String.format("/api/2.0/default-base-environments/%s", request.getId());
+ try {
+ Request req = new Request("DELETE", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ apiClient.execute(req, Void.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
@Override
public void install(InstallLibraries request) {
String path = "/api/2.0/libraries/install";
@@ -55,6 +83,34 @@ public void install(InstallLibraries request) {
}
}
+ @Override
+ public ListDefaultBaseEnvironmentsResponse listDefaultBaseEnvironments(
+ ListDefaultBaseEnvironmentsRequest request) {
+ String path = "/api/2.0/default-base-environments";
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, ListDefaultBaseEnvironmentsResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public void refreshDefaultBaseEnvironments(RefreshDefaultBaseEnvironmentsRequest request) {
+ String path = "/api/2.0/default-base-environments/refresh";
+ try {
+ Request req = new Request("POST", path, apiClient.serialize(request));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ apiClient.execute(req, Void.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
@Override
public void uninstall(UninstallLibraries request) {
String path = "/api/2.0/libraries/uninstall";
@@ -68,4 +124,19 @@ public void uninstall(UninstallLibraries request) {
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
}
+
+ @Override
+ public DefaultBaseEnvironment updateDefaultBaseEnvironment(
+ UpdateDefaultBaseEnvironmentRequest request) {
+ String path = String.format("/api/2.0/default-base-environments/%s", request.getId());
+ try {
+ Request req = new Request("PATCH", path, apiClient.serialize(request));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, DefaultBaseEnvironment.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesService.java
index 5ccaf55cb..d98477363 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/LibrariesService.java
@@ -41,15 +41,48 @@ public interface LibrariesService {
*/
ClusterLibraryStatuses clusterStatus(ClusterStatus clusterStatus);
+ /**
+ * Create a default base environment within workspaces to define the environment version and a
+ * list of dependencies to be used in serverless notebooks and jobs. This process will
+ * asynchronously generate a cache to optimize dependency resolution.
+ */
+ DefaultBaseEnvironment createDefaultBaseEnvironment(
+ CreateDefaultBaseEnvironmentRequest createDefaultBaseEnvironmentRequest);
+
+ /**
+ * Delete the default base environment given an ID. The default base environment may be used by
+ * downstream workloads. Please ensure that the deletion is intentional.
+ */
+ void deleteDefaultBaseEnvironment(
+ DeleteDefaultBaseEnvironmentRequest deleteDefaultBaseEnvironmentRequest);
+
/**
* Add libraries to install on a cluster. The installation is asynchronous; it happens in the
* background after the completion of this request.
*/
void install(InstallLibraries installLibraries);
+ /** List default base environments defined in the workspaces for the requested user. */
+ ListDefaultBaseEnvironmentsResponse listDefaultBaseEnvironments(
+ ListDefaultBaseEnvironmentsRequest listDefaultBaseEnvironmentsRequest);
+
+ /**
+ * Refresh the cached default base environments for the given IDs. This process will
+ * asynchronously regenerate the caches. The existing caches remain available until they expire.
+ */
+ void refreshDefaultBaseEnvironments(
+ RefreshDefaultBaseEnvironmentsRequest refreshDefaultBaseEnvironmentsRequest);
+
/**
* Set libraries to uninstall from a cluster. The libraries won't be uninstalled until the cluster
* is restarted. A request to uninstall a library that is not currently installed is ignored.
*/
void uninstall(UninstallLibraries uninstallLibraries);
+
+ /**
+ * Update the default base environment for the given ID. This process will asynchronously
+ * regenerate the cache. The existing cache remains available until it expires.
+ */
+ DefaultBaseEnvironment updateDefaultBaseEnvironment(
+ UpdateDefaultBaseEnvironmentRequest updateDefaultBaseEnvironmentRequest);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListDefaultBaseEnvironmentsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListDefaultBaseEnvironmentsRequest.java
new file mode 100755
index 000000000..b4f8149ba
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListDefaultBaseEnvironmentsRequest.java
@@ -0,0 +1,61 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.compute;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class ListDefaultBaseEnvironmentsRequest {
+ /** */
+ @JsonIgnore
+ @QueryParam("page_size")
+ private Long pageSize;
+
+ /** */
+ @JsonIgnore
+ @QueryParam("page_token")
+ private String pageToken;
+
+ public ListDefaultBaseEnvironmentsRequest setPageSize(Long pageSize) {
+ this.pageSize = pageSize;
+ return this;
+ }
+
+ public Long getPageSize() {
+ return pageSize;
+ }
+
+ public ListDefaultBaseEnvironmentsRequest setPageToken(String pageToken) {
+ this.pageToken = pageToken;
+ return this;
+ }
+
+ public String getPageToken() {
+ return pageToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListDefaultBaseEnvironmentsRequest that = (ListDefaultBaseEnvironmentsRequest) o;
+ return Objects.equals(pageSize, that.pageSize) && Objects.equals(pageToken, that.pageToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(pageSize, pageToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListDefaultBaseEnvironmentsRequest.class)
+ .add("pageSize", pageSize)
+ .add("pageToken", pageToken)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListDefaultBaseEnvironmentsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListDefaultBaseEnvironmentsResponse.java
new file mode 100755
index 000000000..c941b5fa9
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/ListDefaultBaseEnvironmentsResponse.java
@@ -0,0 +1,61 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.compute;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class ListDefaultBaseEnvironmentsResponse {
+ /** */
+ @JsonProperty("default_base_environments")
+ private Collection<DefaultBaseEnvironment> defaultBaseEnvironments;
+
+ /** */
+ @JsonProperty("next_page_token")
+ private String nextPageToken;
+
+ public ListDefaultBaseEnvironmentsResponse setDefaultBaseEnvironments(
+ Collection<DefaultBaseEnvironment> defaultBaseEnvironments) {
+ this.defaultBaseEnvironments = defaultBaseEnvironments;
+ return this;
+ }
+
+ public Collection<DefaultBaseEnvironment> getDefaultBaseEnvironments() {
+ return defaultBaseEnvironments;
+ }
+
+ public ListDefaultBaseEnvironmentsResponse setNextPageToken(String nextPageToken) {
+ this.nextPageToken = nextPageToken;
+ return this;
+ }
+
+ public String getNextPageToken() {
+ return nextPageToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListDefaultBaseEnvironmentsResponse that = (ListDefaultBaseEnvironmentsResponse) o;
+ return Objects.equals(defaultBaseEnvironments, that.defaultBaseEnvironments)
+ && Objects.equals(nextPageToken, that.nextPageToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(defaultBaseEnvironments, nextPageToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListDefaultBaseEnvironmentsResponse.class)
+ .add("defaultBaseEnvironments", defaultBaseEnvironments)
+ .add("nextPageToken", nextPageToken)
+ .toString();
+ }
+}
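
The request/response pair above carries the usual page_size/page_token contract; the Paginator wiring in LibrariesAPI simply loops until next_page_token comes back null or empty. A sketch of the same loop done by hand against the service interface (the page size of 50 is an arbitrary illustration):

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.compute.DefaultBaseEnvironment;
import com.databricks.sdk.service.compute.LibrariesService;
import com.databricks.sdk.service.compute.ListDefaultBaseEnvironmentsRequest;
import com.databricks.sdk.service.compute.ListDefaultBaseEnvironmentsResponse;

public class ManualDefaultBaseEnvironmentPaging {
  public static void main(String[] args) {
    LibrariesService svc = new WorkspaceClient().libraries().impl();

    ListDefaultBaseEnvironmentsRequest req =
        new ListDefaultBaseEnvironmentsRequest().setPageSize(50L);
    while (true) {
      ListDefaultBaseEnvironmentsResponse page = svc.listDefaultBaseEnvironments(req);
      if (page.getDefaultBaseEnvironments() != null) {
        for (DefaultBaseEnvironment dbe : page.getDefaultBaseEnvironments()) {
          System.out.println(dbe);
        }
      }
      String token = page.getNextPageToken();
      if (token == null || token.isEmpty()) {
        break; // no more pages
      }
      req.setPageToken(token);
    }
  }
}
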
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/MaterializedEnvironment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/MaterializedEnvironment.java
new file mode 100755
index 000000000..52a43e725
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/MaterializedEnvironment.java
@@ -0,0 +1,59 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.compute;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/**
+ * Materialized Environment information enables environment sharing and reuse via Environment
+ * Caching during library installations. Currently this feature is only supported for Python
+ * libraries.
+ *
+ * - If the env cache entry in LMv2 DB doesn't exist or is invalid, library installations and
+ * environment materialization will occur. A new Materialized Environment metadata will be sent from
+ * DP upon successful library installations and env materialization, and is persisted into database
+ * by LMv2. - If the env cache entry in LMv2 DB is valid, the Materialized Environment will be sent
+ * to DP by LMv2, and DP will restore the cached environment from a store instead of reinstalling
+ * libraries from scratch.
+ *
+ *
+ * If changed, also update estore/namespaces/defaultbaseenvironments/latest.proto with new
+ * version
+ */
+@Generated
+public class MaterializedEnvironment {
+ /** The timestamp (in epoch milliseconds) when the materialized env is updated. */
+ @JsonProperty("last_updated_timestamp")
+ private Long lastUpdatedTimestamp;
+
+ public MaterializedEnvironment setLastUpdatedTimestamp(Long lastUpdatedTimestamp) {
+ this.lastUpdatedTimestamp = lastUpdatedTimestamp;
+ return this;
+ }
+
+ public Long getLastUpdatedTimestamp() {
+ return lastUpdatedTimestamp;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ MaterializedEnvironment that = (MaterializedEnvironment) o;
+ return Objects.equals(lastUpdatedTimestamp, that.lastUpdatedTimestamp);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(lastUpdatedTimestamp);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(MaterializedEnvironment.class)
+ .add("lastUpdatedTimestamp", lastUpdatedTimestamp)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/NodeTypeFlexibility.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/NodeTypeFlexibility.java
new file mode 100755
index 000000000..7366ed43d
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/NodeTypeFlexibility.java
@@ -0,0 +1,33 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.compute;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import java.util.Objects;
+
+/**
+ * For Fleet-V2 using classic clusters, this object contains the information about the alternate
+ * node type ids to use when attempting to launch a cluster. It can be used with both the driver and
+ * worker node types.
+ */
+@Generated
+public class NodeTypeFlexibility {
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash();
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(NodeTypeFlexibility.class).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RefreshDefaultBaseEnvironmentsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RefreshDefaultBaseEnvironmentsRequest.java
new file mode 100755
index 000000000..c7f60255b
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/RefreshDefaultBaseEnvironmentsRequest.java
@@ -0,0 +1,43 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.compute;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class RefreshDefaultBaseEnvironmentsRequest {
+ /** */
+ @JsonProperty("ids")
+ private Collection<String> ids;
+
+ public RefreshDefaultBaseEnvironmentsRequest setIds(Collection<String> ids) {
+ this.ids = ids;
+ return this;
+ }
+
+ public Collection<String> getIds() {
+ return ids;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ RefreshDefaultBaseEnvironmentsRequest that = (RefreshDefaultBaseEnvironmentsRequest) o;
+ return Objects.equals(ids, that.ids);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(ids);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(RefreshDefaultBaseEnvironmentsRequest.class).add("ids", ids).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateDefaultBaseEnvironmentRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateDefaultBaseEnvironmentRequest.java
new file mode 100755
index 000000000..fcf7e1e9d
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/compute/UpdateDefaultBaseEnvironmentRequest.java
@@ -0,0 +1,60 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.compute;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class UpdateDefaultBaseEnvironmentRequest {
+ /** */
+ @JsonProperty("default_base_environment")
+ private DefaultBaseEnvironment defaultBaseEnvironment;
+
+ /** */
+ @JsonIgnore private String id;
+
+ public UpdateDefaultBaseEnvironmentRequest setDefaultBaseEnvironment(
+ DefaultBaseEnvironment defaultBaseEnvironment) {
+ this.defaultBaseEnvironment = defaultBaseEnvironment;
+ return this;
+ }
+
+ public DefaultBaseEnvironment getDefaultBaseEnvironment() {
+ return defaultBaseEnvironment;
+ }
+
+ public UpdateDefaultBaseEnvironmentRequest setId(String id) {
+ this.id = id;
+ return this;
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdateDefaultBaseEnvironmentRequest that = (UpdateDefaultBaseEnvironmentRequest) o;
+ return Objects.equals(defaultBaseEnvironment, that.defaultBaseEnvironment)
+ && Objects.equals(id, that.id);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(defaultBaseEnvironment, id);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateDefaultBaseEnvironmentRequest.class)
+ .add("defaultBaseEnvironment", defaultBaseEnvironment)
+ .add("id", id)
+ .toString();
+ }
+}
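
Update follows the same builder pattern: wrap the new DefaultBaseEnvironment payload and the target ID in the request. A minimal sketch, again with a placeholder ID and the payload fields left unset:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.compute.DefaultBaseEnvironment;
import com.databricks.sdk.service.compute.UpdateDefaultBaseEnvironmentRequest;

public class UpdateDefaultBaseEnvironmentExample {
  public static void main(String[] args) {
    WorkspaceClient w = new WorkspaceClient();

    // The cache is regenerated asynchronously; the old cache stays usable until it expires.
    DefaultBaseEnvironment updated =
        w.libraries()
            .updateDefaultBaseEnvironment(
                new UpdateDefaultBaseEnvironmentRequest()
                    .setId("<dbe-id>") // placeholder
                    .setDefaultBaseEnvironment(new DefaultBaseEnvironment()));
    System.out.println(updated);
  }
}
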
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelPublishedQueryExecutionRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelPublishedQueryExecutionRequest.java
new file mode 100755
index 000000000..07776b06d
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelPublishedQueryExecutionRequest.java
@@ -0,0 +1,79 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class CancelPublishedQueryExecutionRequest {
+ /** */
+ @JsonIgnore
+ @QueryParam("dashboard_name")
+ private String dashboardName;
+
+ /** */
+ @JsonIgnore
+ @QueryParam("dashboard_revision_id")
+ private String dashboardRevisionId;
+
+ /** Example: EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ */
+ @JsonIgnore
+ @QueryParam("tokens")
+ private Collection<String> tokens;
+
+ public CancelPublishedQueryExecutionRequest setDashboardName(String dashboardName) {
+ this.dashboardName = dashboardName;
+ return this;
+ }
+
+ public String getDashboardName() {
+ return dashboardName;
+ }
+
+ public CancelPublishedQueryExecutionRequest setDashboardRevisionId(String dashboardRevisionId) {
+ this.dashboardRevisionId = dashboardRevisionId;
+ return this;
+ }
+
+ public String getDashboardRevisionId() {
+ return dashboardRevisionId;
+ }
+
+ public CancelPublishedQueryExecutionRequest setTokens(Collection<String> tokens) {
+ this.tokens = tokens;
+ return this;
+ }
+
+ public Collection<String> getTokens() {
+ return tokens;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CancelPublishedQueryExecutionRequest that = (CancelPublishedQueryExecutionRequest) o;
+ return Objects.equals(dashboardName, that.dashboardName)
+ && Objects.equals(dashboardRevisionId, that.dashboardRevisionId)
+ && Objects.equals(tokens, that.tokens);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(dashboardName, dashboardRevisionId, tokens);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CancelPublishedQueryExecutionRequest.class)
+ .add("dashboardName", dashboardName)
+ .add("dashboardRevisionId", dashboardRevisionId)
+ .add("tokens", tokens)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelQueryExecutionResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelQueryExecutionResponse.java
new file mode 100755
index 000000000..3476fb9ef
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelQueryExecutionResponse.java
@@ -0,0 +1,44 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class CancelQueryExecutionResponse {
+ /** */
+ @JsonProperty("status")
+ private Collection<CancelQueryExecutionResponseStatus> status;
+
+ public CancelQueryExecutionResponse setStatus(
+ Collection<CancelQueryExecutionResponseStatus> status) {
+ this.status = status;
+ return this;
+ }
+
+ public Collection<CancelQueryExecutionResponseStatus> getStatus() {
+ return status;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CancelQueryExecutionResponse that = (CancelQueryExecutionResponse) o;
+ return Objects.equals(status, that.status);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(status);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CancelQueryExecutionResponse.class).add("status", status).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelQueryExecutionResponseStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelQueryExecutionResponseStatus.java
new file mode 100755
index 000000000..3d8a03c06
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/CancelQueryExecutionResponseStatus.java
@@ -0,0 +1,77 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class CancelQueryExecutionResponseStatus {
+ /**
+ * The token to poll for the result asynchronously. Example:
+ * EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ
+ */
+ @JsonProperty("data_token")
+ private String dataToken;
+
+ /** */
+ @JsonProperty("pending")
+ private Empty pending;
+
+ /** */
+ @JsonProperty("success")
+ private Empty success;
+
+ public CancelQueryExecutionResponseStatus setDataToken(String dataToken) {
+ this.dataToken = dataToken;
+ return this;
+ }
+
+ public String getDataToken() {
+ return dataToken;
+ }
+
+ public CancelQueryExecutionResponseStatus setPending(Empty pending) {
+ this.pending = pending;
+ return this;
+ }
+
+ public Empty getPending() {
+ return pending;
+ }
+
+ public CancelQueryExecutionResponseStatus setSuccess(Empty success) {
+ this.success = success;
+ return this;
+ }
+
+ public Empty getSuccess() {
+ return success;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CancelQueryExecutionResponseStatus that = (CancelQueryExecutionResponseStatus) o;
+ return Objects.equals(dataToken, that.dataToken)
+ && Objects.equals(pending, that.pending)
+ && Objects.equals(success, that.success);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(dataToken, pending, success);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(CancelQueryExecutionResponseStatus.class)
+ .add("dataToken", dataToken)
+ .add("pending", pending)
+ .add("success", success)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Empty.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Empty.java
new file mode 100755
index 000000000..8714d62a6
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/Empty.java
@@ -0,0 +1,32 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import java.util.Objects;
+
+/**
+ * Represents an empty message, similar to google.protobuf.Empty, which is not available in the firm
+ * right now.
+ */
+@Generated
+public class Empty {
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash();
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(Empty.class).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ExecutePublishedDashboardQueryRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ExecutePublishedDashboardQueryRequest.java
new file mode 100755
index 000000000..c5223007c
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/ExecutePublishedDashboardQueryRequest.java
@@ -0,0 +1,86 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+/**
+ * Execute query request for published Dashboards. Since published dashboards have the option of
+ * running as the publisher, the datasets and warehouse_id are excluded from the request and instead
+ * read from the source (lakeview-config) via the additional parameters (dashboardName and
+ * dashboardRevisionId)
+ */
+@Generated
+public class ExecutePublishedDashboardQueryRequest {
+ /**
+ * Dashboard name and revision_id are required to retrieve PublishedDatasetDataModel which contains
+ * the list of datasets, warehouse_id, and embedded_credentials
+ */
+ @JsonProperty("dashboard_name")
+ private String dashboardName;
+
+ /** */
+ @JsonProperty("dashboard_revision_id")
+ private String dashboardRevisionId;
+
+ /**
+ * A dashboard schedule can override the warehouse used as compute for processing the published
+ * dashboard queries
+ */
+ @JsonProperty("override_warehouse_id")
+ private String overrideWarehouseId;
+
+ public ExecutePublishedDashboardQueryRequest setDashboardName(String dashboardName) {
+ this.dashboardName = dashboardName;
+ return this;
+ }
+
+ public String getDashboardName() {
+ return dashboardName;
+ }
+
+ public ExecutePublishedDashboardQueryRequest setDashboardRevisionId(String dashboardRevisionId) {
+ this.dashboardRevisionId = dashboardRevisionId;
+ return this;
+ }
+
+ public String getDashboardRevisionId() {
+ return dashboardRevisionId;
+ }
+
+ public ExecutePublishedDashboardQueryRequest setOverrideWarehouseId(String overrideWarehouseId) {
+ this.overrideWarehouseId = overrideWarehouseId;
+ return this;
+ }
+
+ public String getOverrideWarehouseId() {
+ return overrideWarehouseId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ExecutePublishedDashboardQueryRequest that = (ExecutePublishedDashboardQueryRequest) o;
+ return Objects.equals(dashboardName, that.dashboardName)
+ && Objects.equals(dashboardRevisionId, that.dashboardRevisionId)
+ && Objects.equals(overrideWarehouseId, that.overrideWarehouseId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(dashboardName, dashboardRevisionId, overrideWarehouseId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ExecutePublishedDashboardQueryRequest.class)
+ .add("dashboardName", dashboardName)
+ .add("dashboardRevisionId", dashboardRevisionId)
+ .add("overrideWarehouseId", overrideWarehouseId)
+ .toString();
+ }
+}
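
These dashboards request types are plain builders; the embedded-dashboard service that accepts them is presumably part of the same change but not shown in this excerpt, so the sketch below only constructs the requests, with placeholder identifiers:

import com.databricks.sdk.service.dashboards.CancelPublishedQueryExecutionRequest;
import com.databricks.sdk.service.dashboards.ExecutePublishedDashboardQueryRequest;
import java.util.Arrays;

public class PublishedDashboardQueryRequests {
  public static void main(String[] args) {
    // Datasets and warehouse are resolved server-side from the published dashboard
    // identified by name + revision; override_warehouse_id is optional.
    ExecutePublishedDashboardQueryRequest execute =
        new ExecutePublishedDashboardQueryRequest()
            .setDashboardName("<dashboard-name>")
            .setDashboardRevisionId("<revision-id>")
            .setOverrideWarehouseId("<warehouse-id>");

    // Cancellation targets in-flight executions by their tokens.
    CancelPublishedQueryExecutionRequest cancel =
        new CancelPublishedQueryExecutionRequest()
            .setDashboardName("<dashboard-name>")
            .setDashboardRevisionId("<revision-id>")
            .setTokens(Arrays.asList("<token>"));

    System.out.println(execute);
    System.out.println(cancel);
  }
}
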
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java
index 2548416fd..7e1d475a1 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieAPI.java
@@ -163,6 +163,59 @@ public GenieGetMessageQueryResultResponse executeMessageQuery(
return impl.executeMessageQuery(request);
}
+ public GenieGenerateDownloadFullQueryResultResponse generateDownloadFullQueryResult(
+ String spaceId, String conversationId, String messageId, String attachmentId) {
+ return generateDownloadFullQueryResult(
+ new GenieGenerateDownloadFullQueryResultRequest()
+ .setSpaceId(spaceId)
+ .setConversationId(conversationId)
+ .setMessageId(messageId)
+ .setAttachmentId(attachmentId));
+ }
+
+ /**
+ * Initiates a new SQL execution and returns a `download_id` that you can use to track the
+ * progress of the download. The query result is stored in an external link and can be retrieved
+ * using the [Get Download Full Query Result](:method:genie/getdownloadfullqueryresult) API.
+ * Warning: Databricks strongly recommends that you protect the URLs that are returned by the
+ * `EXTERNAL_LINKS` disposition. See [Execute
+ * Statement](:method:statementexecution/executestatement) for more details.
+ */
+ public GenieGenerateDownloadFullQueryResultResponse generateDownloadFullQueryResult(
+ GenieGenerateDownloadFullQueryResultRequest request) {
+ return impl.generateDownloadFullQueryResult(request);
+ }
+
+ public GenieGetDownloadFullQueryResultResponse getDownloadFullQueryResult(
+ String spaceId,
+ String conversationId,
+ String messageId,
+ String attachmentId,
+ String downloadId) {
+ return getDownloadFullQueryResult(
+ new GenieGetDownloadFullQueryResultRequest()
+ .setSpaceId(spaceId)
+ .setConversationId(conversationId)
+ .setMessageId(messageId)
+ .setAttachmentId(attachmentId)
+ .setDownloadId(downloadId));
+ }
+
+ /**
+ * After [Generating a Full Query Result Download](:method:genie/getdownloadfullqueryresult) and
+ * successfully receiving a `download_id`, use this API to poll the download progress. When the
+ * download is complete, the API returns one or more external links to the query result files.
+ * Warning: Databricks strongly recommends that you protect the URLs that are returned by the
+ * `EXTERNAL_LINKS` disposition. You must not set an Authorization header in download requests.
+ * When using the `EXTERNAL_LINKS` disposition, Databricks returns presigned URLs that grant
+ * temporary access to data. See [Execute Statement](:method:statementexecution/executestatement)
+ * for more details.
+ */
+ public GenieGetDownloadFullQueryResultResponse getDownloadFullQueryResult(
+ GenieGetDownloadFullQueryResultRequest request) {
+ return impl.getDownloadFullQueryResult(request);
+ }
+
public GenieMessage getMessage(String spaceId, String conversationId, String messageId) {
return getMessage(
new GenieGetConversationMessageRequest()
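
The two Genie methods added above form a generate-then-poll pair. A hedged sketch of that flow, assuming the standard w.genie() accessor and placeholder IDs; real code would inspect the statement status on the response rather than only checking for non-null:

import com.databricks.sdk.WorkspaceClient;
import com.databricks.sdk.service.dashboards.GenieGenerateDownloadFullQueryResultResponse;
import com.databricks.sdk.service.dashboards.GenieGetDownloadFullQueryResultResponse;

public class GenieDownloadFullQueryResult {
  public static void main(String[] args) throws InterruptedException {
    WorkspaceClient w = new WorkspaceClient();

    // Step 1: kick off the download and capture the download_id.
    GenieGenerateDownloadFullQueryResultResponse started =
        w.genie()
            .generateDownloadFullQueryResult(
                "<space-id>", "<conversation-id>", "<message-id>", "<attachment-id>");

    // Step 2: poll with the download_id until the statement response (with external links)
    // is available. The completion check here is intentionally simplified.
    GenieGetDownloadFullQueryResultResponse result;
    while (true) {
      result =
          w.genie()
              .getDownloadFullQueryResult(
                  "<space-id>",
                  "<conversation-id>",
                  "<message-id>",
                  "<attachment-id>",
                  started.getDownloadId());
      if (result.getStatementResponse() != null) {
        break;
      }
      Thread.sleep(2000);
    }
    System.out.println(result.getStatementResponse());
  }
}
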
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGenerateDownloadFullQueryResultRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGenerateDownloadFullQueryResultRequest.java
new file mode 100755
index 000000000..7dc36298f
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGenerateDownloadFullQueryResultRequest.java
@@ -0,0 +1,86 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class GenieGenerateDownloadFullQueryResultRequest {
+ /** Attachment ID */
+ @JsonIgnore private String attachmentId;
+
+ /** Conversation ID */
+ @JsonIgnore private String conversationId;
+
+ /** Message ID */
+ @JsonIgnore private String messageId;
+
+ /** Genie space ID */
+ @JsonIgnore private String spaceId;
+
+ public GenieGenerateDownloadFullQueryResultRequest setAttachmentId(String attachmentId) {
+ this.attachmentId = attachmentId;
+ return this;
+ }
+
+ public String getAttachmentId() {
+ return attachmentId;
+ }
+
+ public GenieGenerateDownloadFullQueryResultRequest setConversationId(String conversationId) {
+ this.conversationId = conversationId;
+ return this;
+ }
+
+ public String getConversationId() {
+ return conversationId;
+ }
+
+ public GenieGenerateDownloadFullQueryResultRequest setMessageId(String messageId) {
+ this.messageId = messageId;
+ return this;
+ }
+
+ public String getMessageId() {
+ return messageId;
+ }
+
+ public GenieGenerateDownloadFullQueryResultRequest setSpaceId(String spaceId) {
+ this.spaceId = spaceId;
+ return this;
+ }
+
+ public String getSpaceId() {
+ return spaceId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GenieGenerateDownloadFullQueryResultRequest that =
+ (GenieGenerateDownloadFullQueryResultRequest) o;
+ return Objects.equals(attachmentId, that.attachmentId)
+ && Objects.equals(conversationId, that.conversationId)
+ && Objects.equals(messageId, that.messageId)
+ && Objects.equals(spaceId, that.spaceId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(attachmentId, conversationId, messageId, spaceId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GenieGenerateDownloadFullQueryResultRequest.class)
+ .add("attachmentId", attachmentId)
+ .add("conversationId", conversationId)
+ .add("messageId", messageId)
+ .add("spaceId", spaceId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGenerateDownloadFullQueryResultResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGenerateDownloadFullQueryResultResponse.java
new file mode 100755
index 000000000..e51751c8b
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGenerateDownloadFullQueryResultResponse.java
@@ -0,0 +1,45 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class GenieGenerateDownloadFullQueryResultResponse {
+ /** Download ID. Use this ID to track the download request in subsequent polling calls */
+ @JsonProperty("download_id")
+ private String downloadId;
+
+ public GenieGenerateDownloadFullQueryResultResponse setDownloadId(String downloadId) {
+ this.downloadId = downloadId;
+ return this;
+ }
+
+ public String getDownloadId() {
+ return downloadId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GenieGenerateDownloadFullQueryResultResponse that =
+ (GenieGenerateDownloadFullQueryResultResponse) o;
+ return Objects.equals(downloadId, that.downloadId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(downloadId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GenieGenerateDownloadFullQueryResultResponse.class)
+ .add("downloadId", downloadId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetDownloadFullQueryResultRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetDownloadFullQueryResultRequest.java
new file mode 100755
index 000000000..73fd97ba2
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetDownloadFullQueryResultRequest.java
@@ -0,0 +1,102 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class GenieGetDownloadFullQueryResultRequest {
+ /** Attachment ID */
+ @JsonIgnore private String attachmentId;
+
+ /** Conversation ID */
+ @JsonIgnore private String conversationId;
+
+ /**
+ * Download ID. This ID is provided by the [Generate Download
+ * endpoint](:method:genie/generateDownloadFullQueryResult)
+ */
+ @JsonIgnore private String downloadId;
+
+ /** Message ID */
+ @JsonIgnore private String messageId;
+
+ /** Genie space ID */
+ @JsonIgnore private String spaceId;
+
+ public GenieGetDownloadFullQueryResultRequest setAttachmentId(String attachmentId) {
+ this.attachmentId = attachmentId;
+ return this;
+ }
+
+ public String getAttachmentId() {
+ return attachmentId;
+ }
+
+ public GenieGetDownloadFullQueryResultRequest setConversationId(String conversationId) {
+ this.conversationId = conversationId;
+ return this;
+ }
+
+ public String getConversationId() {
+ return conversationId;
+ }
+
+ public GenieGetDownloadFullQueryResultRequest setDownloadId(String downloadId) {
+ this.downloadId = downloadId;
+ return this;
+ }
+
+ public String getDownloadId() {
+ return downloadId;
+ }
+
+ public GenieGetDownloadFullQueryResultRequest setMessageId(String messageId) {
+ this.messageId = messageId;
+ return this;
+ }
+
+ public String getMessageId() {
+ return messageId;
+ }
+
+ public GenieGetDownloadFullQueryResultRequest setSpaceId(String spaceId) {
+ this.spaceId = spaceId;
+ return this;
+ }
+
+ public String getSpaceId() {
+ return spaceId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GenieGetDownloadFullQueryResultRequest that = (GenieGetDownloadFullQueryResultRequest) o;
+ return Objects.equals(attachmentId, that.attachmentId)
+ && Objects.equals(conversationId, that.conversationId)
+ && Objects.equals(downloadId, that.downloadId)
+ && Objects.equals(messageId, that.messageId)
+ && Objects.equals(spaceId, that.spaceId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(attachmentId, conversationId, downloadId, messageId, spaceId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GenieGetDownloadFullQueryResultRequest.class)
+ .add("attachmentId", attachmentId)
+ .add("conversationId", conversationId)
+ .add("downloadId", downloadId)
+ .add("messageId", messageId)
+ .add("spaceId", spaceId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetDownloadFullQueryResultResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetDownloadFullQueryResultResponse.java
new file mode 100755
index 000000000..490c5c518
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieGetDownloadFullQueryResultResponse.java
@@ -0,0 +1,48 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class GenieGetDownloadFullQueryResultResponse {
+ /**
+ * SQL Statement Execution response. See [Get status, manifest, and result first
+ * chunk](:method:statementexecution/getstatement) for more details.
+ */
+ @JsonProperty("statement_response")
+ private com.databricks.sdk.service.sql.StatementResponse statementResponse;
+
+ public GenieGetDownloadFullQueryResultResponse setStatementResponse(
+ com.databricks.sdk.service.sql.StatementResponse statementResponse) {
+ this.statementResponse = statementResponse;
+ return this;
+ }
+
+ public com.databricks.sdk.service.sql.StatementResponse getStatementResponse() {
+ return statementResponse;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GenieGetDownloadFullQueryResultResponse that = (GenieGetDownloadFullQueryResultResponse) o;
+ return Objects.equals(statementResponse, that.statementResponse);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(statementResponse);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GenieGetDownloadFullQueryResultResponse.class)
+ .add("statementResponse", statementResponse)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java
index d055781dc..0e62a0a87 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieImpl.java
@@ -86,6 +86,47 @@ public GenieGetMessageQueryResultResponse executeMessageQuery(
}
}
+ @Override
+ public GenieGenerateDownloadFullQueryResultResponse generateDownloadFullQueryResult(
+ GenieGenerateDownloadFullQueryResultRequest request) {
+ String path =
+ String.format(
+ "/api/2.0/genie/spaces/%s/conversations/%s/messages/%s/attachments/%s/downloads",
+ request.getSpaceId(),
+ request.getConversationId(),
+ request.getMessageId(),
+ request.getAttachmentId());
+ try {
+ Request req = new Request("POST", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, GenieGenerateDownloadFullQueryResultResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public GenieGetDownloadFullQueryResultResponse getDownloadFullQueryResult(
+ GenieGetDownloadFullQueryResultRequest request) {
+ String path =
+ String.format(
+ "/api/2.0/genie/spaces/%s/conversations/%s/messages/%s/attachments/%s/downloads/%s",
+ request.getSpaceId(),
+ request.getConversationId(),
+ request.getMessageId(),
+ request.getAttachmentId(),
+ request.getDownloadId());
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, GenieGetDownloadFullQueryResultResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
@Override
public GenieMessage getMessage(GenieGetConversationMessageRequest request) {
String path =
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java
index f0ac59161..ca6e28d0a 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GenieService.java
@@ -36,6 +36,30 @@ GenieGetMessageQueryResultResponse executeMessageAttachmentQuery(
GenieGetMessageQueryResultResponse executeMessageQuery(
GenieExecuteMessageQueryRequest genieExecuteMessageQueryRequest);
+ /**
+ * Initiates a new SQL execution and returns a `download_id` that you can use to track the
+ * progress of the download. The query result is stored in an external link and can be retrieved
+ * using the [Get Download Full Query Result](:method:genie/getdownloadfullqueryresult) API.
+ * Warning: Databricks strongly recommends that you protect the URLs that are returned by the
+ * `EXTERNAL_LINKS` disposition. See [Execute
+ * Statement](:method:statementexecution/executestatement) for more details.
+ */
+ GenieGenerateDownloadFullQueryResultResponse generateDownloadFullQueryResult(
+ GenieGenerateDownloadFullQueryResultRequest genieGenerateDownloadFullQueryResultRequest);
+
+ /**
+ * After [Generating a Full Query Result Download](:method:genie/generatedownloadfullqueryresult) and
+ * successfully receiving a `download_id`, use this API to poll the download progress. When the
+ * download is complete, the API returns one or more external links to the query result files.
+ * Warning: Databricks strongly recommends that you protect the URLs that are returned by the
+ * `EXTERNAL_LINKS` disposition. You must not set an Authorization header in download requests.
+ * When using the `EXTERNAL_LINKS` disposition, Databricks returns presigned URLs that grant
+ * temporary access to data. See [Execute Statement](:method:statementexecution/executestatement)
+ * for more details.
+ */
+ GenieGetDownloadFullQueryResultResponse getDownloadFullQueryResult(
+ GenieGetDownloadFullQueryResultRequest genieGetDownloadFullQueryResultRequest);
+
/** Get message from conversation. */
GenieMessage getMessage(GenieGetConversationMessageRequest genieGetConversationMessageRequest);
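The two methods above form a generate-then-poll workflow: generateDownloadFullQueryResult starts the SQL execution and returns a `download_id`, and getDownloadFullQueryResult is polled with that id until external links are available. A minimal sketch of that flow follows; it assumes WorkspaceClient exposes a genie() accessor with wrapper methods matching this interface, that the generate request has the same fluent setters as the get request, and that the generate response exposes getDownloadId() for the documented `download_id` (all assumptions, not confirmed by this excerpt).

    import com.databricks.sdk.WorkspaceClient;
    import com.databricks.sdk.service.dashboards.GenieGenerateDownloadFullQueryResultRequest;
    import com.databricks.sdk.service.dashboards.GenieGenerateDownloadFullQueryResultResponse;
    import com.databricks.sdk.service.dashboards.GenieGetDownloadFullQueryResultRequest;
    import com.databricks.sdk.service.dashboards.GenieGetDownloadFullQueryResultResponse;

    public class GenieDownloadSketch {
      public static void main(String[] args) {
        WorkspaceClient w = new WorkspaceClient();
        String spaceId = "<space-id>";
        String conversationId = "<conversation-id>";
        String messageId = "<message-id>";
        String attachmentId = "<attachment-id>";

        // Start the download; the response carries the download_id used for polling.
        GenieGenerateDownloadFullQueryResultResponse started =
            w.genie()
                .generateDownloadFullQueryResult(
                    new GenieGenerateDownloadFullQueryResultRequest()
                        .setSpaceId(spaceId)
                        .setConversationId(conversationId)
                        .setMessageId(messageId)
                        .setAttachmentId(attachmentId));

        // Poll for completion; the wrapped StatementResponse eventually holds the external links.
        GenieGetDownloadFullQueryResultResponse download =
            w.genie()
                .getDownloadFullQueryResult(
                    new GenieGetDownloadFullQueryResultRequest()
                        .setSpaceId(spaceId)
                        .setConversationId(conversationId)
                        .setMessageId(messageId)
                        .setAttachmentId(attachmentId)
                        .setDownloadId(started.getDownloadId())); // assumed accessor
        System.out.println(download.getStatementResponse());
      }
    }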
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardEmbeddedRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardEmbeddedRequest.java
new file mode 100755
index 000000000..95f6048f1
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/GetPublishedDashboardEmbeddedRequest.java
@@ -0,0 +1,43 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class GetPublishedDashboardEmbeddedRequest {
+ /** UUID identifying the published dashboard. */
+ @JsonIgnore private String dashboardId;
+
+ public GetPublishedDashboardEmbeddedRequest setDashboardId(String dashboardId) {
+ this.dashboardId = dashboardId;
+ return this;
+ }
+
+ public String getDashboardId() {
+ return dashboardId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetPublishedDashboardEmbeddedRequest that = (GetPublishedDashboardEmbeddedRequest) o;
+ return Objects.equals(dashboardId, that.dashboardId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(dashboardId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetPublishedDashboardEmbeddedRequest.class)
+ .add("dashboardId", dashboardId)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedAPI.java
index ed46478a1..a7bc6c10d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedAPI.java
@@ -23,6 +23,16 @@ public LakeviewEmbeddedAPI(LakeviewEmbeddedService mock) {
impl = mock;
}
+ public void getPublishedDashboardEmbedded(String dashboardId) {
+ getPublishedDashboardEmbedded(
+ new GetPublishedDashboardEmbeddedRequest().setDashboardId(dashboardId));
+ }
+
+ /** Get the current published dashboard within an embedded context. */
+ public void getPublishedDashboardEmbedded(GetPublishedDashboardEmbeddedRequest request) {
+ impl.getPublishedDashboardEmbedded(request);
+ }
+
public GetPublishedDashboardTokenInfoResponse getPublishedDashboardTokenInfo(String dashboardId) {
return getPublishedDashboardTokenInfo(
new GetPublishedDashboardTokenInfoRequest().setDashboardId(dashboardId));
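getPublishedDashboardEmbedded returns no body; it simply succeeds or throws when the published dashboard cannot be served in an embedded context. A minimal sketch, assuming the existing WorkspaceClient lakeviewEmbedded() accessor:

    import com.databricks.sdk.WorkspaceClient;

    public class EmbeddedDashboardCheck {
      public static void main(String[] args) {
        WorkspaceClient w = new WorkspaceClient();
        // Throws a DatabricksException if the dashboard is not available in the embedded context.
        w.lakeviewEmbedded().getPublishedDashboardEmbedded("<dashboard-id>");
      }
    }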
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedImpl.java
index 55a489702..171eb1e7f 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedImpl.java
@@ -16,6 +16,21 @@ public LakeviewEmbeddedImpl(ApiClient apiClient) {
this.apiClient = apiClient;
}
+ @Override
+ public void getPublishedDashboardEmbedded(GetPublishedDashboardEmbeddedRequest request) {
+ String path =
+ String.format(
+ "/api/2.0/lakeview/dashboards/%s/published/embedded", request.getDashboardId());
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ apiClient.execute(req, Void.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
@Override
public GetPublishedDashboardTokenInfoResponse getPublishedDashboardTokenInfo(
GetPublishedDashboardTokenInfoRequest request) {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedService.java
index 3aa679410..98c1b546d 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/LakeviewEmbeddedService.java
@@ -12,6 +12,10 @@
*/
@Generated
public interface LakeviewEmbeddedService {
+ /** Get the current published dashboard within an embedded context. */
+ void getPublishedDashboardEmbedded(
+ GetPublishedDashboardEmbeddedRequest getPublishedDashboardEmbeddedRequest);
+
/**
* Get a required authorization details and scopes of a published dashboard to mint an OAuth
* token.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PendingStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PendingStatus.java
new file mode 100755
index 000000000..f041070b2
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PendingStatus.java
@@ -0,0 +1,45 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class PendingStatus {
+ /**
+ * The token to poll for the result asynchronously. Example:
+ * EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ
+ */
+ @JsonProperty("data_token")
+ private String dataToken;
+
+ public PendingStatus setDataToken(String dataToken) {
+ this.dataToken = dataToken;
+ return this;
+ }
+
+ public String getDataToken() {
+ return dataToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ PendingStatus that = (PendingStatus) o;
+ return Objects.equals(dataToken, that.dataToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(dataToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(PendingStatus.class).add("dataToken", dataToken).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollPublishedQueryStatusRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollPublishedQueryStatusRequest.java
new file mode 100755
index 000000000..e34c7af59
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollPublishedQueryStatusRequest.java
@@ -0,0 +1,79 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class PollPublishedQueryStatusRequest {
+ /** */
+ @JsonIgnore
+ @QueryParam("dashboard_name")
+ private String dashboardName;
+
+ /** */
+ @JsonIgnore
+ @QueryParam("dashboard_revision_id")
+ private String dashboardRevisionId;
+
+ /** Example: EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ */
+ @JsonIgnore
+ @QueryParam("tokens")
+ private Collection<String> tokens;
+
+ public PollPublishedQueryStatusRequest setDashboardName(String dashboardName) {
+ this.dashboardName = dashboardName;
+ return this;
+ }
+
+ public String getDashboardName() {
+ return dashboardName;
+ }
+
+ public PollPublishedQueryStatusRequest setDashboardRevisionId(String dashboardRevisionId) {
+ this.dashboardRevisionId = dashboardRevisionId;
+ return this;
+ }
+
+ public String getDashboardRevisionId() {
+ return dashboardRevisionId;
+ }
+
+ public PollPublishedQueryStatusRequest setTokens(Collection<String> tokens) {
+ this.tokens = tokens;
+ return this;
+ }
+
+ public Collection<String> getTokens() {
+ return tokens;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ PollPublishedQueryStatusRequest that = (PollPublishedQueryStatusRequest) o;
+ return Objects.equals(dashboardName, that.dashboardName)
+ && Objects.equals(dashboardRevisionId, that.dashboardRevisionId)
+ && Objects.equals(tokens, that.tokens);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(dashboardName, dashboardRevisionId, tokens);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(PollPublishedQueryStatusRequest.class)
+ .add("dashboardName", dashboardName)
+ .add("dashboardRevisionId", dashboardRevisionId)
+ .add("tokens", tokens)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollQueryStatusResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollQueryStatusResponse.java
new file mode 100755
index 000000000..778e1d961
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollQueryStatusResponse.java
@@ -0,0 +1,43 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class PollQueryStatusResponse {
+ /** */
+ @JsonProperty("data")
+ private Collection<PollQueryStatusResponseData> data;
+
+ public PollQueryStatusResponse setData(Collection<PollQueryStatusResponseData> data) {
+ this.data = data;
+ return this;
+ }
+
+ public Collection<PollQueryStatusResponseData> getData() {
+ return data;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ PollQueryStatusResponse that = (PollQueryStatusResponse) o;
+ return Objects.equals(data, that.data);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(data);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(PollQueryStatusResponse.class).add("data", data).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollQueryStatusResponseData.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollQueryStatusResponseData.java
new file mode 100755
index 000000000..9de9b2743
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/PollQueryStatusResponseData.java
@@ -0,0 +1,42 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class PollQueryStatusResponseData {
+ /** */
+ @JsonProperty("status")
+ private QueryResponseStatus status;
+
+ public PollQueryStatusResponseData setStatus(QueryResponseStatus status) {
+ this.status = status;
+ return this;
+ }
+
+ public QueryResponseStatus getStatus() {
+ return status;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ PollQueryStatusResponseData that = (PollQueryStatusResponseData) o;
+ return Objects.equals(status, that.status);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(status);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(PollQueryStatusResponseData.class).add("status", status).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionAPI.java
new file mode 100755
index 000000000..eb016a2f8
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionAPI.java
@@ -0,0 +1,68 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.support.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/** Query execution APIs for AI / BI Dashboards */
+@Generated
+public class QueryExecutionAPI {
+ private static final Logger LOG = LoggerFactory.getLogger(QueryExecutionAPI.class);
+
+ private final QueryExecutionService impl;
+
+ /** Regular-use constructor */
+ public QueryExecutionAPI(ApiClient apiClient) {
+ impl = new QueryExecutionImpl(apiClient);
+ }
+
+ /** Constructor for mocks */
+ public QueryExecutionAPI(QueryExecutionService mock) {
+ impl = mock;
+ }
+
+ public CancelQueryExecutionResponse cancelPublishedQueryExecution(
+ String dashboardName, String dashboardRevisionId) {
+ return cancelPublishedQueryExecution(
+ new CancelPublishedQueryExecutionRequest()
+ .setDashboardName(dashboardName)
+ .setDashboardRevisionId(dashboardRevisionId));
+ }
+
+ /** Cancel the results for a query for a published, embedded dashboard. */
+ public CancelQueryExecutionResponse cancelPublishedQueryExecution(
+ CancelPublishedQueryExecutionRequest request) {
+ return impl.cancelPublishedQueryExecution(request);
+ }
+
+ public void executePublishedDashboardQuery(String dashboardName, String dashboardRevisionId) {
+ executePublishedDashboardQuery(
+ new ExecutePublishedDashboardQueryRequest()
+ .setDashboardName(dashboardName)
+ .setDashboardRevisionId(dashboardRevisionId));
+ }
+
+ /** Execute a query for a published dashboard. */
+ public void executePublishedDashboardQuery(ExecutePublishedDashboardQueryRequest request) {
+ impl.executePublishedDashboardQuery(request);
+ }
+
+ public PollQueryStatusResponse pollPublishedQueryStatus(
+ String dashboardName, String dashboardRevisionId) {
+ return pollPublishedQueryStatus(
+ new PollPublishedQueryStatusRequest()
+ .setDashboardName(dashboardName)
+ .setDashboardRevisionId(dashboardRevisionId));
+ }
+
+ /** Poll the results for a query for a published, embedded dashboard. */
+ public PollQueryStatusResponse pollPublishedQueryStatus(PollPublishedQueryStatusRequest request) {
+ return impl.pollPublishedQueryStatus(request);
+ }
+
+ public QueryExecutionService impl() {
+ return impl;
+ }
+}
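QueryExecutionAPI covers the execute/poll/cancel lifecycle for queries behind a published, embedded dashboard. A short sketch of that lifecycle, assuming this change also wires a queryExecution() accessor into WorkspaceClient (not shown in this excerpt):

    import com.databricks.sdk.WorkspaceClient;
    import com.databricks.sdk.service.dashboards.PollQueryStatusResponse;

    public class PublishedDashboardQuerySketch {
      public static void main(String[] args) {
        WorkspaceClient w = new WorkspaceClient();
        String dashboardName = "<dashboard-name>";
        String revisionId = "<dashboard-revision-id>";

        // Kick off query execution for the published dashboard revision.
        w.queryExecution().executePublishedDashboardQuery(dashboardName, revisionId);

        // Poll for per-query status (see the status-handling sketch after QueryResponseStatus).
        PollQueryStatusResponse statuses =
            w.queryExecution().pollPublishedQueryStatus(dashboardName, revisionId);
        System.out.println(statuses);

        // Cancel any outstanding executions for the same revision if they are no longer needed.
        w.queryExecution().cancelPublishedQueryExecution(dashboardName, revisionId);
      }
    }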
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionImpl.java
new file mode 100755
index 000000000..19efc614a
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionImpl.java
@@ -0,0 +1,59 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
+import com.databricks.sdk.support.Generated;
+import java.io.IOException;
+
+/** Package-local implementation of QueryExecution */
+@Generated
+class QueryExecutionImpl implements QueryExecutionService {
+ private final ApiClient apiClient;
+
+ public QueryExecutionImpl(ApiClient apiClient) {
+ this.apiClient = apiClient;
+ }
+
+ @Override
+ public CancelQueryExecutionResponse cancelPublishedQueryExecution(
+ CancelPublishedQueryExecutionRequest request) {
+ String path = "/api/2.0/lakeview-query/query/published";
+ try {
+ Request req = new Request("DELETE", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, CancelQueryExecutionResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public void executePublishedDashboardQuery(ExecutePublishedDashboardQueryRequest request) {
+ String path = "/api/2.0/lakeview-query/query/published";
+ try {
+ Request req = new Request("POST", path, apiClient.serialize(request));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ apiClient.execute(req, Void.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public PollQueryStatusResponse pollPublishedQueryStatus(PollPublishedQueryStatusRequest request) {
+ String path = "/api/2.0/lakeview-query/query/published";
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, PollQueryStatusResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionService.java
new file mode 100755
index 000000000..d30cda5b6
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryExecutionService.java
@@ -0,0 +1,26 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+
+/**
+ * Query execution APIs for AI / BI Dashboards
+ *
+ * <p>This is the high-level interface, that contains generated methods.
+ *
+ * <p>Evolving: this interface is under development. Method signatures may change.
+ */
+@Generated
+public interface QueryExecutionService {
+ /** Cancel the results for a query for a published, embedded dashboard. */
+ CancelQueryExecutionResponse cancelPublishedQueryExecution(
+ CancelPublishedQueryExecutionRequest cancelPublishedQueryExecutionRequest);
+
+ /** Execute a query for a published dashboard. */
+ void executePublishedDashboardQuery(
+ ExecutePublishedDashboardQueryRequest executePublishedDashboardQueryRequest);
+
+ /** Poll the results for a query for a published, embedded dashboard. */
+ PollQueryStatusResponse pollPublishedQueryStatus(
+ PollPublishedQueryStatusRequest pollPublishedQueryStatusRequest);
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryResponseStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryResponseStatus.java
new file mode 100755
index 000000000..a57d202ef
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/QueryResponseStatus.java
@@ -0,0 +1,108 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class QueryResponseStatus {
+ /** */
+ @JsonProperty("canceled")
+ private Empty canceled;
+
+ /** */
+ @JsonProperty("closed")
+ private Empty closed;
+
+ /** */
+ @JsonProperty("pending")
+ private PendingStatus pending;
+
+ /**
+ * The statement ID, in the format 01eef5da-c56e-1f36-bafa-21906587d6ba. The statement_id should
+ * be identical to the data_token in SuccessStatus and PendingStatus. This field exists for audit
+ * logging purposes, to record the statement_id of every QueryResponseStatus.
+ */
+ @JsonProperty("statement_id")
+ private String statementId;
+
+ /** */
+ @JsonProperty("success")
+ private SuccessStatus success;
+
+ public QueryResponseStatus setCanceled(Empty canceled) {
+ this.canceled = canceled;
+ return this;
+ }
+
+ public Empty getCanceled() {
+ return canceled;
+ }
+
+ public QueryResponseStatus setClosed(Empty closed) {
+ this.closed = closed;
+ return this;
+ }
+
+ public Empty getClosed() {
+ return closed;
+ }
+
+ public QueryResponseStatus setPending(PendingStatus pending) {
+ this.pending = pending;
+ return this;
+ }
+
+ public PendingStatus getPending() {
+ return pending;
+ }
+
+ public QueryResponseStatus setStatementId(String statementId) {
+ this.statementId = statementId;
+ return this;
+ }
+
+ public String getStatementId() {
+ return statementId;
+ }
+
+ public QueryResponseStatus setSuccess(SuccessStatus success) {
+ this.success = success;
+ return this;
+ }
+
+ public SuccessStatus getSuccess() {
+ return success;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ QueryResponseStatus that = (QueryResponseStatus) o;
+ return Objects.equals(canceled, that.canceled)
+ && Objects.equals(closed, that.closed)
+ && Objects.equals(pending, that.pending)
+ && Objects.equals(statementId, that.statementId)
+ && Objects.equals(success, that.success);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(canceled, closed, pending, statementId, success);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(QueryResponseStatus.class)
+ .add("canceled", canceled)
+ .add("closed", closed)
+ .add("pending", pending)
+ .add("statementId", statementId)
+ .add("success", success)
+ .toString();
+ }
+}
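QueryResponseStatus behaves like a tagged union: at most one of canceled, closed, pending, or success is expected to be populated per query. A sketch of how a caller might branch on the statuses returned by pollPublishedQueryStatus (the helper class name is illustrative, not part of the SDK):

    import com.databricks.sdk.service.dashboards.PollQueryStatusResponse;
    import com.databricks.sdk.service.dashboards.PollQueryStatusResponseData;
    import com.databricks.sdk.service.dashboards.QueryResponseStatus;

    final class QueryStatusPrinter {
      static void print(PollQueryStatusResponse response) {
        if (response.getData() == null) {
          return;
        }
        for (PollQueryStatusResponseData entry : response.getData()) {
          QueryResponseStatus status = entry.getStatus();
          if (status.getSuccess() != null) {
            // The data_token is exchanged for results; truncated signals a byte/row limit was hit.
            System.out.printf(
                "success: token=%s truncated=%s%n",
                status.getSuccess().getDataToken(), status.getSuccess().getTruncated());
          } else if (status.getPending() != null) {
            System.out.println(
                "pending: poll again with token " + status.getPending().getDataToken());
          } else if (status.getCanceled() != null) {
            System.out.println("canceled: " + status.getStatementId());
          } else if (status.getClosed() != null) {
            System.out.println("closed: " + status.getStatementId());
          }
        }
      }
    }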
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SuccessStatus.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SuccessStatus.java
new file mode 100755
index 000000000..c54d199d3
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/dashboards/SuccessStatus.java
@@ -0,0 +1,61 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.dashboards;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class SuccessStatus {
+ /**
+ * The token to poll for the result asynchronously. Example:
+ * EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ
+ */
+ @JsonProperty("data_token")
+ private String dataToken;
+
+ /** Whether the query result is truncated (either by byte limit or row limit) */
+ @JsonProperty("truncated")
+ private Boolean truncated;
+
+ public SuccessStatus setDataToken(String dataToken) {
+ this.dataToken = dataToken;
+ return this;
+ }
+
+ public String getDataToken() {
+ return dataToken;
+ }
+
+ public SuccessStatus setTruncated(Boolean truncated) {
+ this.truncated = truncated;
+ return this;
+ }
+
+ public Boolean getTruncated() {
+ return truncated;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ SuccessStatus that = (SuccessStatus) o;
+ return Objects.equals(dataToken, that.dataToken) && Objects.equals(truncated, that.truncated);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(dataToken, truncated);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(SuccessStatus.class)
+ .add("dataToken", dataToken)
+ .add("truncated", truncated)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseAPI.java
index c7baf5d89..7398f4a29 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseAPI.java
@@ -125,6 +125,15 @@ public void deleteSyncedDatabaseTable(DeleteSyncedDatabaseTableRequest request)
impl.deleteSyncedDatabaseTable(request);
}
+ public DatabaseInstance failoverDatabaseInstance(String name) {
+ return failoverDatabaseInstance(new FailoverDatabaseInstanceRequest().setName(name));
+ }
+
+ /** Failover the primary node of a Database Instance to a secondary. */
+ public DatabaseInstance failoverDatabaseInstance(FailoverDatabaseInstanceRequest request) {
+ return impl.failoverDatabaseInstance(request);
+ }
+
/** Find a Database Instance by uid. */
public DatabaseInstance findDatabaseInstanceByUid(FindDatabaseInstanceByUidRequest request) {
return impl.findDatabaseInstanceByUid(request);
@@ -181,6 +190,25 @@ public SyncedDatabaseTable getSyncedDatabaseTable(GetSyncedDatabaseTableRequest
return impl.getSyncedDatabaseTable(request);
}
+ public Iterable<DatabaseCatalog> listDatabaseCatalogs(String instanceName) {
+ return listDatabaseCatalogs(new ListDatabaseCatalogsRequest().setInstanceName(instanceName));
+ }
+
+ /** List all Database Catalogs within a Database Instance. */
+ public Iterable<DatabaseCatalog> listDatabaseCatalogs(ListDatabaseCatalogsRequest request) {
+ return new Paginator<>(
+ request,
+ impl::listDatabaseCatalogs,
+ ListDatabaseCatalogsResponse::getDatabaseCatalogs,
+ response -> {
+ String token = response.getNextPageToken();
+ if (token == null || token.isEmpty()) {
+ return null;
+ }
+ return request.setPageToken(token);
+ });
+ }
+
public Iterable<DatabaseInstanceRole> listDatabaseInstanceRoles(String instanceName) {
return listDatabaseInstanceRoles(
new ListDatabaseInstanceRolesRequest().setInstanceName(instanceName));
@@ -217,6 +245,41 @@ public Iterable listDatabaseInstances(ListDatabaseInstancesReq
});
}
+ public Iterable<SyncedDatabaseTable> listSyncedDatabaseTables(String instanceName) {
+ return listSyncedDatabaseTables(
+ new ListSyncedDatabaseTablesRequest().setInstanceName(instanceName));
+ }
+
+ /** List all Synced Database Tables within a Database Instance. */
+ public Iterable<SyncedDatabaseTable> listSyncedDatabaseTables(
+ ListSyncedDatabaseTablesRequest request) {
+ return new Paginator<>(
+ request,
+ impl::listSyncedDatabaseTables,
+ ListSyncedDatabaseTablesResponse::getSyncedTables,
+ response -> {
+ String token = response.getNextPageToken();
+ if (token == null || token.isEmpty()) {
+ return null;
+ }
+ return request.setPageToken(token);
+ });
+ }
+
+ public DatabaseCatalog updateDatabaseCatalog(
+ String name, DatabaseCatalog databaseCatalog, String updateMask) {
+ return updateDatabaseCatalog(
+ new UpdateDatabaseCatalogRequest()
+ .setName(name)
+ .setDatabaseCatalog(databaseCatalog)
+ .setUpdateMask(updateMask));
+ }
+
+ /** Update a Database Catalog. */
+ public DatabaseCatalog updateDatabaseCatalog(UpdateDatabaseCatalogRequest request) {
+ return impl.updateDatabaseCatalog(request);
+ }
+
public DatabaseInstance updateDatabaseInstance(
String name, DatabaseInstance databaseInstance, String updateMask) {
return updateDatabaseInstance(
@@ -231,6 +294,20 @@ public DatabaseInstance updateDatabaseInstance(UpdateDatabaseInstanceRequest req
return impl.updateDatabaseInstance(request);
}
+ public SyncedDatabaseTable updateSyncedDatabaseTable(
+ String name, SyncedDatabaseTable syncedTable, String updateMask) {
+ return updateSyncedDatabaseTable(
+ new UpdateSyncedDatabaseTableRequest()
+ .setName(name)
+ .setSyncedTable(syncedTable)
+ .setUpdateMask(updateMask));
+ }
+
+ /** Update a Synced Database Table. */
+ public SyncedDatabaseTable updateSyncedDatabaseTable(UpdateSyncedDatabaseTableRequest request) {
+ return impl.updateSyncedDatabaseTable(request);
+ }
+
public DatabaseService impl() {
return impl;
}
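The new DatabaseAPI methods add instance failover, listing of Database Catalogs and Synced Database Tables (both exposed as Paginator-backed Iterables that follow next_page_token), and update_mask-driven PATCH updates. A brief sketch of the failover and listing calls, assuming the existing WorkspaceClient database() accessor:

    import com.databricks.sdk.WorkspaceClient;
    import com.databricks.sdk.service.database.DatabaseCatalog;
    import com.databricks.sdk.service.database.DatabaseInstance;
    import com.databricks.sdk.service.database.SyncedDatabaseTable;

    public class DatabaseInstanceSketch {
      public static void main(String[] args) {
        WorkspaceClient w = new WorkspaceClient();
        String instanceName = "<instance-name>";

        // Promote a secondary node of the instance to primary.
        DatabaseInstance instance = w.database().failoverDatabaseInstance(instanceName);
        System.out.println(instance);

        // The Iterables transparently page through results using next_page_token.
        for (DatabaseCatalog catalog : w.database().listDatabaseCatalogs(instanceName)) {
          System.out.println(catalog);
        }
        for (SyncedDatabaseTable table : w.database().listSyncedDatabaseTables(instanceName)) {
          System.out.println(table);
        }
      }
    }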
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseImpl.java
index 702b11a4c..5caac9964 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseImpl.java
@@ -156,6 +156,20 @@ public void deleteSyncedDatabaseTable(DeleteSyncedDatabaseTableRequest request)
}
}
+ @Override
+ public DatabaseInstance failoverDatabaseInstance(FailoverDatabaseInstanceRequest request) {
+ String path = String.format("/api/2.0/database/instances/%s/failover", request.getName());
+ try {
+ Request req = new Request("POST", path, apiClient.serialize(request));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, DatabaseInstance.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
@Override
public DatabaseInstance findDatabaseInstanceByUid(FindDatabaseInstanceByUidRequest request) {
String path = "/api/2.0/database/instances:findByUid";
@@ -251,6 +265,20 @@ public SyncedDatabaseTable getSyncedDatabaseTable(GetSyncedDatabaseTableRequest
}
}
+ @Override
+ public ListDatabaseCatalogsResponse listDatabaseCatalogs(ListDatabaseCatalogsRequest request) {
+ String path =
+ String.format("/api/2.0/database/instances/%s/catalogs", request.getInstanceName());
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, ListDatabaseCatalogsResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
@Override
public ListDatabaseInstanceRolesResponse listDatabaseInstanceRoles(
ListDatabaseInstanceRolesRequest request) {
@@ -278,6 +306,35 @@ public ListDatabaseInstancesResponse listDatabaseInstances(ListDatabaseInstances
}
}
+ @Override
+ public ListSyncedDatabaseTablesResponse listSyncedDatabaseTables(
+ ListSyncedDatabaseTablesRequest request) {
+ String path =
+ String.format("/api/2.0/database/instances/%s/synced_tables", request.getInstanceName());
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, ListSyncedDatabaseTablesResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public DatabaseCatalog updateDatabaseCatalog(UpdateDatabaseCatalogRequest request) {
+ String path = String.format("/api/2.0/database/catalogs/%s", request.getName());
+ try {
+ Request req = new Request("PATCH", path, apiClient.serialize(request.getDatabaseCatalog()));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, DatabaseCatalog.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
@Override
public DatabaseInstance updateDatabaseInstance(UpdateDatabaseInstanceRequest request) {
String path = String.format("/api/2.0/database/instances/%s", request.getName());
@@ -291,4 +348,18 @@ public DatabaseInstance updateDatabaseInstance(UpdateDatabaseInstanceRequest req
throw new DatabricksException("IO error: " + e.getMessage(), e);
}
}
+
+ @Override
+ public SyncedDatabaseTable updateSyncedDatabaseTable(UpdateSyncedDatabaseTableRequest request) {
+ String path = String.format("/api/2.0/database/synced_tables/%s", request.getName());
+ try {
+ Request req = new Request("PATCH", path, apiClient.serialize(request.getSyncedTable()));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, SyncedDatabaseTable.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstance.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstance.java
index bb57e65ef..ca6cededc 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstance.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseInstance.java
@@ -13,6 +13,14 @@
*/
@Generated
public class DatabaseInstance {
+ /**
+ * The desired budget policy to associate with the instance. This field is only returned on
+ * create/update responses, and represents the customer provided budget policy. See
+ * effective_budget_policy_id for the policy that is actually applied to the instance.
+ */
+ @JsonProperty("budget_policy_id")
+ private String budgetPolicyId;
+
/** The sku of the instance. Valid values are "CU_1", "CU_2", "CU_4", "CU_8". */
@JsonProperty("capacity")
private String capacity;
@@ -29,6 +37,20 @@ public class DatabaseInstance {
@JsonProperty("creator")
private String creator;
+ /** The policy that is applied to the instance. */
+ @JsonProperty("effective_budget_policy_id")
+ private String effectiveBudgetPolicyId;
+
+ /**
+ * xref AIP-129. `enable_pg_native_login` is owned by the client, while
+ * `effective_enable_pg_native_login` is owned by the server. `enable_pg_native_login` will only
+ * be set in Create/Update response messages if and only if the user provides the field via the
+ * request. `effective_enable_pg_native_login` on the other hand will always be set in all
+ * response messages (Create/Update/Get/List).
+ */
+ @JsonProperty("effective_enable_pg_native_login")
+ private Boolean effectiveEnablePgNativeLogin;
+
/**
* xref AIP-129. `enable_readable_secondaries` is owned by the client, while
* `effective_enable_readable_secondaries` is owned by the server. `enable_readable_secondaries`
@@ -67,6 +89,10 @@ public class DatabaseInstance {
@JsonProperty("effective_stopped")
private Boolean effectiveStopped;
+ /** Whether the instance has PG native password login enabled. Defaults to true. */
+ @JsonProperty("enable_pg_native_login")
+ private Boolean enablePgNativeLogin;
+
/** Whether to enable secondaries to serve read-only traffic. Defaults to false. */
@JsonProperty("enable_readable_secondaries")
private Boolean enableReadableSecondaries;
@@ -124,6 +150,15 @@ public class DatabaseInstance {
@JsonProperty("uid")
private String uid;
+ public DatabaseInstance setBudgetPolicyId(String budgetPolicyId) {
+ this.budgetPolicyId = budgetPolicyId;
+ return this;
+ }
+
+ public String getBudgetPolicyId() {
+ return budgetPolicyId;
+ }
+
public DatabaseInstance setCapacity(String capacity) {
this.capacity = capacity;
return this;
@@ -160,6 +195,24 @@ public String getCreator() {
return creator;
}
+ public DatabaseInstance setEffectiveBudgetPolicyId(String effectiveBudgetPolicyId) {
+ this.effectiveBudgetPolicyId = effectiveBudgetPolicyId;
+ return this;
+ }
+
+ public String getEffectiveBudgetPolicyId() {
+ return effectiveBudgetPolicyId;
+ }
+
+ public DatabaseInstance setEffectiveEnablePgNativeLogin(Boolean effectiveEnablePgNativeLogin) {
+ this.effectiveEnablePgNativeLogin = effectiveEnablePgNativeLogin;
+ return this;
+ }
+
+ public Boolean getEffectiveEnablePgNativeLogin() {
+ return effectiveEnablePgNativeLogin;
+ }
+
public DatabaseInstance setEffectiveEnableReadableSecondaries(
Boolean effectiveEnableReadableSecondaries) {
this.effectiveEnableReadableSecondaries = effectiveEnableReadableSecondaries;
@@ -197,6 +250,15 @@ public Boolean getEffectiveStopped() {
return effectiveStopped;
}
+ public DatabaseInstance setEnablePgNativeLogin(Boolean enablePgNativeLogin) {
+ this.enablePgNativeLogin = enablePgNativeLogin;
+ return this;
+ }
+
+ public Boolean getEnablePgNativeLogin() {
+ return enablePgNativeLogin;
+ }
+
public DatabaseInstance setEnableReadableSecondaries(Boolean enableReadableSecondaries) {
this.enableReadableSecondaries = enableReadableSecondaries;
return this;
@@ -301,15 +363,19 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
DatabaseInstance that = (DatabaseInstance) o;
- return Objects.equals(capacity, that.capacity)
+ return Objects.equals(budgetPolicyId, that.budgetPolicyId)
+ && Objects.equals(capacity, that.capacity)
&& Objects.equals(childInstanceRefs, that.childInstanceRefs)
&& Objects.equals(creationTime, that.creationTime)
&& Objects.equals(creator, that.creator)
+ && Objects.equals(effectiveBudgetPolicyId, that.effectiveBudgetPolicyId)
+ && Objects.equals(effectiveEnablePgNativeLogin, that.effectiveEnablePgNativeLogin)
&& Objects.equals(
effectiveEnableReadableSecondaries, that.effectiveEnableReadableSecondaries)
&& Objects.equals(effectiveNodeCount, that.effectiveNodeCount)
&& Objects.equals(effectiveRetentionWindowInDays, that.effectiveRetentionWindowInDays)
&& Objects.equals(effectiveStopped, that.effectiveStopped)
+ && Objects.equals(enablePgNativeLogin, that.enablePgNativeLogin)
&& Objects.equals(enableReadableSecondaries, that.enableReadableSecondaries)
&& Objects.equals(name, that.name)
&& Objects.equals(nodeCount, that.nodeCount)
@@ -326,14 +392,18 @@ public boolean equals(Object o) {
@Override
public int hashCode() {
return Objects.hash(
+ budgetPolicyId,
capacity,
childInstanceRefs,
creationTime,
creator,
+ effectiveBudgetPolicyId,
+ effectiveEnablePgNativeLogin,
effectiveEnableReadableSecondaries,
effectiveNodeCount,
effectiveRetentionWindowInDays,
effectiveStopped,
+ enablePgNativeLogin,
enableReadableSecondaries,
name,
nodeCount,
@@ -350,14 +420,18 @@ public int hashCode() {
@Override
public String toString() {
return new ToStringer(DatabaseInstance.class)
+ .add("budgetPolicyId", budgetPolicyId)
.add("capacity", capacity)
.add("childInstanceRefs", childInstanceRefs)
.add("creationTime", creationTime)
.add("creator", creator)
+ .add("effectiveBudgetPolicyId", effectiveBudgetPolicyId)
+ .add("effectiveEnablePgNativeLogin", effectiveEnablePgNativeLogin)
.add("effectiveEnableReadableSecondaries", effectiveEnableReadableSecondaries)
.add("effectiveNodeCount", effectiveNodeCount)
.add("effectiveRetentionWindowInDays", effectiveRetentionWindowInDays)
.add("effectiveStopped", effectiveStopped)
+ .add("enablePgNativeLogin", enablePgNativeLogin)
.add("enableReadableSecondaries", enableReadableSecondaries)
.add("name", name)
.add("nodeCount", nodeCount)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseService.java
index 8f67c97ff..e552946e2 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseService.java
@@ -49,6 +49,10 @@ void deleteDatabaseInstanceRole(
/** Delete a Synced Database Table. */
void deleteSyncedDatabaseTable(DeleteSyncedDatabaseTableRequest deleteSyncedDatabaseTableRequest);
+ /** Failover the primary node of a Database Instance to a secondary. */
+ DatabaseInstance failoverDatabaseInstance(
+ FailoverDatabaseInstanceRequest failoverDatabaseInstanceRequest);
+
/** Find a Database Instance by uid. */
DatabaseInstance findDatabaseInstanceByUid(
FindDatabaseInstanceByUidRequest findDatabaseInstanceByUidRequest);
@@ -74,6 +78,10 @@ DatabaseInstanceRole getDatabaseInstanceRole(
SyncedDatabaseTable getSyncedDatabaseTable(
GetSyncedDatabaseTableRequest getSyncedDatabaseTableRequest);
+ /** List all Database Catalogs within a Database Instance. */
+ ListDatabaseCatalogsResponse listDatabaseCatalogs(
+ ListDatabaseCatalogsRequest listDatabaseCatalogsRequest);
+
/** START OF PG ROLE APIs Section */
ListDatabaseInstanceRolesResponse listDatabaseInstanceRoles(
ListDatabaseInstanceRolesRequest listDatabaseInstanceRolesRequest);
@@ -82,7 +90,18 @@ ListDatabaseInstanceRolesResponse listDatabaseInstanceRoles(
ListDatabaseInstancesResponse listDatabaseInstances(
ListDatabaseInstancesRequest listDatabaseInstancesRequest);
+ /** List all Synced Database Tables within a Database Instance. */
+ ListSyncedDatabaseTablesResponse listSyncedDatabaseTables(
+ ListSyncedDatabaseTablesRequest listSyncedDatabaseTablesRequest);
+
+ /** Update a Database Catalog. */
+ DatabaseCatalog updateDatabaseCatalog(UpdateDatabaseCatalogRequest updateDatabaseCatalogRequest);
+
/** Update a Database Instance. */
DatabaseInstance updateDatabaseInstance(
UpdateDatabaseInstanceRequest updateDatabaseInstanceRequest);
+
+ /** Update a Synced Database Table. */
+ SyncedDatabaseTable updateSyncedDatabaseTable(
+ UpdateSyncedDatabaseTableRequest updateSyncedDatabaseTableRequest);
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseTable.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseTable.java
index 8bae3d07a..42e127417 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseTable.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/DatabaseTable.java
@@ -37,6 +37,10 @@ public class DatabaseTable {
@JsonProperty("name")
private String name;
+ /** Data serving REST API URL for this table */
+ @JsonProperty("table_serving_url")
+ private String tableServingUrl;
+
public DatabaseTable setDatabaseInstanceName(String databaseInstanceName) {
this.databaseInstanceName = databaseInstanceName;
return this;
@@ -64,6 +68,15 @@ public String getName() {
return name;
}
+ public DatabaseTable setTableServingUrl(String tableServingUrl) {
+ this.tableServingUrl = tableServingUrl;
+ return this;
+ }
+
+ public String getTableServingUrl() {
+ return tableServingUrl;
+ }
+
@Override
public boolean equals(Object o) {
if (this == o) return true;
@@ -71,12 +84,13 @@ public boolean equals(Object o) {
DatabaseTable that = (DatabaseTable) o;
return Objects.equals(databaseInstanceName, that.databaseInstanceName)
&& Objects.equals(logicalDatabaseName, that.logicalDatabaseName)
- && Objects.equals(name, that.name);
+ && Objects.equals(name, that.name)
+ && Objects.equals(tableServingUrl, that.tableServingUrl);
}
@Override
public int hashCode() {
- return Objects.hash(databaseInstanceName, logicalDatabaseName, name);
+ return Objects.hash(databaseInstanceName, logicalDatabaseName, name, tableServingUrl);
}
@Override
@@ -85,6 +99,7 @@ public String toString() {
.add("databaseInstanceName", databaseInstanceName)
.add("logicalDatabaseName", logicalDatabaseName)
.add("name", name)
+ .add("tableServingUrl", tableServingUrl)
.toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/FailoverDatabaseInstanceRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/FailoverDatabaseInstanceRequest.java
new file mode 100755
index 000000000..24cda6829
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/FailoverDatabaseInstanceRequest.java
@@ -0,0 +1,61 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class FailoverDatabaseInstanceRequest {
+ /** */
+ @JsonProperty("failover_target_database_instance_name")
+ private String failoverTargetDatabaseInstanceName;
+
+ /** Name of the instance to failover. */
+ @JsonIgnore private String name;
+
+ public FailoverDatabaseInstanceRequest setFailoverTargetDatabaseInstanceName(
+ String failoverTargetDatabaseInstanceName) {
+ this.failoverTargetDatabaseInstanceName = failoverTargetDatabaseInstanceName;
+ return this;
+ }
+
+ public String getFailoverTargetDatabaseInstanceName() {
+ return failoverTargetDatabaseInstanceName;
+ }
+
+ public FailoverDatabaseInstanceRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ FailoverDatabaseInstanceRequest that = (FailoverDatabaseInstanceRequest) o;
+ return Objects.equals(
+ failoverTargetDatabaseInstanceName, that.failoverTargetDatabaseInstanceName)
+ && Objects.equals(name, that.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(failoverTargetDatabaseInstanceName, name);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(FailoverDatabaseInstanceRequest.class)
+ .add("failoverTargetDatabaseInstanceName", failoverTargetDatabaseInstanceName)
+ .add("name", name)
+ .toString();
+ }
+}
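The DatabaseAPI convenience overload only sets the instance name; the request form shown above additionally lets you name the failover target explicitly. A minimal sketch, again assuming the WorkspaceClient database() accessor:

    import com.databricks.sdk.WorkspaceClient;
    import com.databricks.sdk.service.database.DatabaseInstance;
    import com.databricks.sdk.service.database.FailoverDatabaseInstanceRequest;

    public class FailoverToTargetSketch {
      public static void main(String[] args) {
        WorkspaceClient w = new WorkspaceClient();
        // Fail over the instance, naming the secondary that should become the new primary.
        DatabaseInstance instance =
            w.database()
                .failoverDatabaseInstance(
                    new FailoverDatabaseInstanceRequest()
                        .setName("<instance-name>")
                        .setFailoverTargetDatabaseInstanceName("<target-instance-name>"));
        System.out.println(instance);
      }
    }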
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseCatalogsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseCatalogsRequest.java
new file mode 100755
index 000000000..d82b5746d
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseCatalogsRequest.java
@@ -0,0 +1,79 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class ListDatabaseCatalogsRequest {
+ /** Name of the instance to get database catalogs for. */
+ @JsonIgnore private String instanceName;
+
+ /** Upper bound for items returned. */
+ @JsonIgnore
+ @QueryParam("page_size")
+ private Long pageSize;
+
+ /**
+ * Pagination token to go to the next page of database catalogs. Requests first page if
+ * absent.
+ */
+ @JsonIgnore
+ @QueryParam("page_token")
+ private String pageToken;
+
+ public ListDatabaseCatalogsRequest setInstanceName(String instanceName) {
+ this.instanceName = instanceName;
+ return this;
+ }
+
+ public String getInstanceName() {
+ return instanceName;
+ }
+
+ public ListDatabaseCatalogsRequest setPageSize(Long pageSize) {
+ this.pageSize = pageSize;
+ return this;
+ }
+
+ public Long getPageSize() {
+ return pageSize;
+ }
+
+ public ListDatabaseCatalogsRequest setPageToken(String pageToken) {
+ this.pageToken = pageToken;
+ return this;
+ }
+
+ public String getPageToken() {
+ return pageToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListDatabaseCatalogsRequest that = (ListDatabaseCatalogsRequest) o;
+ return Objects.equals(instanceName, that.instanceName)
+ && Objects.equals(pageSize, that.pageSize)
+ && Objects.equals(pageToken, that.pageToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(instanceName, pageSize, pageToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListDatabaseCatalogsRequest.class)
+ .add("instanceName", instanceName)
+ .add("pageSize", pageSize)
+ .add("pageToken", pageToken)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseCatalogsResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseCatalogsResponse.java
new file mode 100755
index 000000000..0eaf899c0
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListDatabaseCatalogsResponse.java
@@ -0,0 +1,61 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class ListDatabaseCatalogsResponse {
+ /** */
+ @JsonProperty("database_catalogs")
+ private Collection<DatabaseCatalog> databaseCatalogs;
+
+ /** Pagination token to request the next page of database catalogs. */
+ @JsonProperty("next_page_token")
+ private String nextPageToken;
+
+ public ListDatabaseCatalogsResponse setDatabaseCatalogs(
+ Collection<DatabaseCatalog> databaseCatalogs) {
+ this.databaseCatalogs = databaseCatalogs;
+ return this;
+ }
+
+ public Collection<DatabaseCatalog> getDatabaseCatalogs() {
+ return databaseCatalogs;
+ }
+
+ public ListDatabaseCatalogsResponse setNextPageToken(String nextPageToken) {
+ this.nextPageToken = nextPageToken;
+ return this;
+ }
+
+ public String getNextPageToken() {
+ return nextPageToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListDatabaseCatalogsResponse that = (ListDatabaseCatalogsResponse) o;
+ return Objects.equals(databaseCatalogs, that.databaseCatalogs)
+ && Objects.equals(nextPageToken, that.nextPageToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(databaseCatalogs, nextPageToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListDatabaseCatalogsResponse.class)
+ .add("databaseCatalogs", databaseCatalogs)
+ .add("nextPageToken", nextPageToken)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListSyncedDatabaseTablesRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListSyncedDatabaseTablesRequest.java
new file mode 100755
index 000000000..219805f8d
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListSyncedDatabaseTablesRequest.java
@@ -0,0 +1,79 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class ListSyncedDatabaseTablesRequest {
+ /** Name of the instance to get synced tables for. */
+ @JsonIgnore private String instanceName;
+
+ /** Upper bound for items returned. */
+ @JsonIgnore
+ @QueryParam("page_size")
+ private Long pageSize;
+
+ /**
+ * Pagination token to go to the next page of synced database tables. Requests first page if
+ * absent.
+ */
+ @JsonIgnore
+ @QueryParam("page_token")
+ private String pageToken;
+
+ public ListSyncedDatabaseTablesRequest setInstanceName(String instanceName) {
+ this.instanceName = instanceName;
+ return this;
+ }
+
+ public String getInstanceName() {
+ return instanceName;
+ }
+
+ public ListSyncedDatabaseTablesRequest setPageSize(Long pageSize) {
+ this.pageSize = pageSize;
+ return this;
+ }
+
+ public Long getPageSize() {
+ return pageSize;
+ }
+
+ public ListSyncedDatabaseTablesRequest setPageToken(String pageToken) {
+ this.pageToken = pageToken;
+ return this;
+ }
+
+ public String getPageToken() {
+ return pageToken;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListSyncedDatabaseTablesRequest that = (ListSyncedDatabaseTablesRequest) o;
+ return Objects.equals(instanceName, that.instanceName)
+ && Objects.equals(pageSize, that.pageSize)
+ && Objects.equals(pageToken, that.pageToken);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(instanceName, pageSize, pageToken);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListSyncedDatabaseTablesRequest.class)
+ .add("instanceName", instanceName)
+ .add("pageSize", pageSize)
+ .add("pageToken", pageToken)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListSyncedDatabaseTablesResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListSyncedDatabaseTablesResponse.java
new file mode 100755
index 000000000..fbc9093a6
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/ListSyncedDatabaseTablesResponse.java
@@ -0,0 +1,61 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class ListSyncedDatabaseTablesResponse {
+ /** Pagination token to request the next page of synced tables. */
+ @JsonProperty("next_page_token")
+ private String nextPageToken;
+
+ /** */
+ @JsonProperty("synced_tables")
+ private Collection<SyncedDatabaseTable> syncedTables;
+
+ public ListSyncedDatabaseTablesResponse setNextPageToken(String nextPageToken) {
+ this.nextPageToken = nextPageToken;
+ return this;
+ }
+
+ public String getNextPageToken() {
+ return nextPageToken;
+ }
+
+ public ListSyncedDatabaseTablesResponse setSyncedTables(
+ Collection<SyncedDatabaseTable> syncedTables) {
+ this.syncedTables = syncedTables;
+ return this;
+ }
+
+ public Collection<SyncedDatabaseTable> getSyncedTables() {
+ return syncedTables;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ListSyncedDatabaseTablesResponse that = (ListSyncedDatabaseTablesResponse) o;
+ return Objects.equals(nextPageToken, that.nextPageToken)
+ && Objects.equals(syncedTables, that.syncedTables);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(nextPageToken, syncedTables);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ListSyncedDatabaseTablesResponse.class)
+ .add("nextPageToken", nextPageToken)
+ .add("syncedTables", syncedTables)
+ .toString();
+ }
+}
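A minimal usage sketch for the two list types above, assuming the Database service gains a listSyncedDatabaseTables method that accepts this request and returns this response (the w.database() accessor, the service method, and the instance name are assumptions for illustration, not part of this hunk):

    import com.databricks.sdk.WorkspaceClient;
    import com.databricks.sdk.service.database.ListSyncedDatabaseTablesRequest;
    import com.databricks.sdk.service.database.ListSyncedDatabaseTablesResponse;
    import com.databricks.sdk.service.database.SyncedDatabaseTable;

    public class ListSyncedTablesSketch {
      public static void main(String[] args) {
        WorkspaceClient w = new WorkspaceClient();
        String pageToken = null;
        do {
          // Assumed accessor and service method; only the request/response classes come from this change.
          ListSyncedDatabaseTablesResponse page =
              w.database()
                  .impl()
                  .listSyncedDatabaseTables(
                      new ListSyncedDatabaseTablesRequest()
                          .setInstanceName("my-database-instance")
                          .setPageSize(50L)
                          .setPageToken(pageToken));
          if (page.getSyncedTables() != null) {
            for (SyncedDatabaseTable table : page.getSyncedTables()) {
              System.out.println(table.getName());
            }
          }
          pageToken = page.getNextPageToken();
        } while (pageToken != null && !pageToken.isEmpty());
      }
    }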
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/NewPipelineSpec.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/NewPipelineSpec.java
index f3a3befad..8b9f1fd19 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/NewPipelineSpec.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/NewPipelineSpec.java
@@ -13,6 +13,10 @@
*/
@Generated
public class NewPipelineSpec {
+ /** Budget policy of this pipeline. */
+ @JsonProperty("budget_policy_id")
+ private String budgetPolicyId;
+
/**
* This field needs to be specified if the destination catalog is a managed postgres catalog.
*
@@ -31,6 +35,15 @@ public class NewPipelineSpec {
@JsonProperty("storage_schema")
private String storageSchema;
+ public NewPipelineSpec setBudgetPolicyId(String budgetPolicyId) {
+ this.budgetPolicyId = budgetPolicyId;
+ return this;
+ }
+
+ public String getBudgetPolicyId() {
+ return budgetPolicyId;
+ }
+
public NewPipelineSpec setStorageCatalog(String storageCatalog) {
this.storageCatalog = storageCatalog;
return this;
@@ -54,18 +67,20 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
NewPipelineSpec that = (NewPipelineSpec) o;
- return Objects.equals(storageCatalog, that.storageCatalog)
+ return Objects.equals(budgetPolicyId, that.budgetPolicyId)
+ && Objects.equals(storageCatalog, that.storageCatalog)
&& Objects.equals(storageSchema, that.storageSchema);
}
@Override
public int hashCode() {
- return Objects.hash(storageCatalog, storageSchema);
+ return Objects.hash(budgetPolicyId, storageCatalog, storageSchema);
}
@Override
public String toString() {
return new ToStringer(NewPipelineSpec.class)
+ .add("budgetPolicyId", budgetPolicyId)
.add("storageCatalog", storageCatalog)
.add("storageSchema", storageSchema)
.toString();
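A short sketch of the new budget policy field on the NewPipelineSpec builder; the policy ID, catalog, and schema values below are placeholders:

    import com.databricks.sdk.service.database.NewPipelineSpec;

    public class BudgetPolicySketch {
      public static void main(String[] args) {
        // Pin the pipeline backing a synced table to a budget policy and a storage location.
        NewPipelineSpec spec =
            new NewPipelineSpec()
                .setBudgetPolicyId("aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee")
                .setStorageCatalog("main")
                .setStorageSchema("synced_table_storage");
        System.out.println(spec);
      }
    }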
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedDatabaseTable.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedDatabaseTable.java
index dd1ff0837..5d5226238 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedDatabaseTable.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/SyncedDatabaseTable.java
@@ -47,6 +47,10 @@ public class SyncedDatabaseTable {
@JsonProperty("spec")
private SyncedTableSpec spec;
+ /** Data serving REST API URL for this table */
+ @JsonProperty("table_serving_url")
+ private String tableServingUrl;
+
/**
* The provisioning state of the synced table entity in Unity Catalog. This is distinct from the
* state of the data synchronization pipeline (i.e. the table may be in "ACTIVE" but the pipeline
@@ -101,6 +105,15 @@ public SyncedTableSpec getSpec() {
return spec;
}
+ public SyncedDatabaseTable setTableServingUrl(String tableServingUrl) {
+ this.tableServingUrl = tableServingUrl;
+ return this;
+ }
+
+ public String getTableServingUrl() {
+ return tableServingUrl;
+ }
+
public SyncedDatabaseTable setUnityCatalogProvisioningState(
ProvisioningInfoState unityCatalogProvisioningState) {
this.unityCatalogProvisioningState = unityCatalogProvisioningState;
@@ -121,6 +134,7 @@ public boolean equals(Object o) {
&& Objects.equals(logicalDatabaseName, that.logicalDatabaseName)
&& Objects.equals(name, that.name)
&& Objects.equals(spec, that.spec)
+ && Objects.equals(tableServingUrl, that.tableServingUrl)
&& Objects.equals(unityCatalogProvisioningState, that.unityCatalogProvisioningState);
}
@@ -132,6 +146,7 @@ public int hashCode() {
logicalDatabaseName,
name,
spec,
+ tableServingUrl,
unityCatalogProvisioningState);
}
@@ -143,6 +158,7 @@ public String toString() {
.add("logicalDatabaseName", logicalDatabaseName)
.add("name", name)
.add("spec", spec)
+ .add("tableServingUrl", tableServingUrl)
.add("unityCatalogProvisioningState", unityCatalogProvisioningState)
.toString();
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseCatalogRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseCatalogRequest.java
new file mode 100755
index 000000000..49cca7c7a
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateDatabaseCatalogRequest.java
@@ -0,0 +1,76 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class UpdateDatabaseCatalogRequest {
+ /** Note that updating a database catalog is not yet supported. */
+ @JsonProperty("database_catalog")
+ private DatabaseCatalog databaseCatalog;
+
+ /** The name of the catalog in UC. */
+ @JsonIgnore private String name;
+
+ /** The list of fields to update. Setting this field is not yet supported. */
+ @JsonIgnore
+ @QueryParam("update_mask")
+ private String updateMask;
+
+ public UpdateDatabaseCatalogRequest setDatabaseCatalog(DatabaseCatalog databaseCatalog) {
+ this.databaseCatalog = databaseCatalog;
+ return this;
+ }
+
+ public DatabaseCatalog getDatabaseCatalog() {
+ return databaseCatalog;
+ }
+
+ public UpdateDatabaseCatalogRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public UpdateDatabaseCatalogRequest setUpdateMask(String updateMask) {
+ this.updateMask = updateMask;
+ return this;
+ }
+
+ public String getUpdateMask() {
+ return updateMask;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdateDatabaseCatalogRequest that = (UpdateDatabaseCatalogRequest) o;
+ return Objects.equals(databaseCatalog, that.databaseCatalog)
+ && Objects.equals(name, that.name)
+ && Objects.equals(updateMask, that.updateMask);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(databaseCatalog, name, updateMask);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateDatabaseCatalogRequest.class)
+ .add("databaseCatalog", databaseCatalog)
+ .add("name", name)
+ .add("updateMask", updateMask)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateSyncedDatabaseTableRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateSyncedDatabaseTableRequest.java
new file mode 100755
index 000000000..679dc16c8
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/database/UpdateSyncedDatabaseTableRequest.java
@@ -0,0 +1,76 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.database;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class UpdateSyncedDatabaseTableRequest {
+ /** Full three-part (catalog, schema, table) name of the table. */
+ @JsonIgnore private String name;
+
+ /** Note that updating a synced database table is not yet supported. */
+ @JsonProperty("synced_table")
+ private SyncedDatabaseTable syncedTable;
+
+ /** The list of fields to update. Setting this field is not yet supported. */
+ @JsonIgnore
+ @QueryParam("update_mask")
+ private String updateMask;
+
+ public UpdateSyncedDatabaseTableRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public UpdateSyncedDatabaseTableRequest setSyncedTable(SyncedDatabaseTable syncedTable) {
+ this.syncedTable = syncedTable;
+ return this;
+ }
+
+ public SyncedDatabaseTable getSyncedTable() {
+ return syncedTable;
+ }
+
+ public UpdateSyncedDatabaseTableRequest setUpdateMask(String updateMask) {
+ this.updateMask = updateMask;
+ return this;
+ }
+
+ public String getUpdateMask() {
+ return updateMask;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ UpdateSyncedDatabaseTableRequest that = (UpdateSyncedDatabaseTableRequest) o;
+ return Objects.equals(name, that.name)
+ && Objects.equals(syncedTable, that.syncedTable)
+ && Objects.equals(updateMask, that.updateMask);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name, syncedTable, updateMask);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(UpdateSyncedDatabaseTableRequest.class)
+ .add("name", name)
+ .add("syncedTable", syncedTable)
+ .add("updateMask", updateMask)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Continuous.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Continuous.java
index 7b239e250..5ffd9ac15 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Continuous.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/Continuous.java
@@ -15,6 +15,12 @@ public class Continuous {
@JsonProperty("pause_status")
private PauseStatus pauseStatus;
+ /**
+ * Indicate whether the continuous job is applying task level retries or not. Defaults to NEVER.
+ */
+ @JsonProperty("task_retry_mode")
+ private TaskRetryMode taskRetryMode;
+
public Continuous setPauseStatus(PauseStatus pauseStatus) {
this.pauseStatus = pauseStatus;
return this;
@@ -24,21 +30,34 @@ public PauseStatus getPauseStatus() {
return pauseStatus;
}
+ public Continuous setTaskRetryMode(TaskRetryMode taskRetryMode) {
+ this.taskRetryMode = taskRetryMode;
+ return this;
+ }
+
+ public TaskRetryMode getTaskRetryMode() {
+ return taskRetryMode;
+ }
+
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Continuous that = (Continuous) o;
- return Objects.equals(pauseStatus, that.pauseStatus);
+ return Objects.equals(pauseStatus, that.pauseStatus)
+ && Objects.equals(taskRetryMode, that.taskRetryMode);
}
@Override
public int hashCode() {
- return Objects.hash(pauseStatus);
+ return Objects.hash(pauseStatus, taskRetryMode);
}
@Override
public String toString() {
- return new ToStringer(Continuous.class).add("pauseStatus", pauseStatus).toString();
+ return new ToStringer(Continuous.class)
+ .add("pauseStatus", pauseStatus)
+ .add("taskRetryMode", taskRetryMode)
+ .toString();
}
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ModelTriggerConfiguration.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ModelTriggerConfiguration.java
new file mode 100755
index 000000000..36452e42e
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ModelTriggerConfiguration.java
@@ -0,0 +1,125 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.jobs;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class ModelTriggerConfiguration {
+ /**
+ * Aliases of the model versions to monitor. Can only be used in conjunction with condition
+ * MODEL_ALIAS_SET.
+ */
+ @JsonProperty("aliases")
+ private Collection<String> aliases;
+
+ /** The condition based on which to trigger a job run. */
+ @JsonProperty("condition")
+ private ModelTriggerConfigurationCondition condition;
+
+ /**
+ * If set, the trigger starts a run only after the specified amount of time has passed since the
+ * last time the trigger fired. The minimum allowed value is 60 seconds.
+ */
+ @JsonProperty("min_time_between_triggers_seconds")
+ private Long minTimeBetweenTriggersSeconds;
+
+ /**
+ * Name of the securable to monitor ("mycatalog.myschema.mymodel" in the case of model-level
+ * triggers, "mycatalog.myschema" in the case of schema-level triggers) or empty in the case of
+ * metastore-level triggers.
+ */
+ @JsonProperty("securable_name")
+ private String securableName;
+
+ /**
+ * If set, the trigger starts a run only after no model updates have occurred for the specified
+ * time and can be used to wait for a series of model updates before triggering a run. The minimum
+ * allowed value is 60 seconds.
+ */
+ @JsonProperty("wait_after_last_change_seconds")
+ private Long waitAfterLastChangeSeconds;
+
+ public ModelTriggerConfiguration setAliases(Collection<String> aliases) {
+ this.aliases = aliases;
+ return this;
+ }
+
+ public Collection<String> getAliases() {
+ return aliases;
+ }
+
+ public ModelTriggerConfiguration setCondition(ModelTriggerConfigurationCondition condition) {
+ this.condition = condition;
+ return this;
+ }
+
+ public ModelTriggerConfigurationCondition getCondition() {
+ return condition;
+ }
+
+ public ModelTriggerConfiguration setMinTimeBetweenTriggersSeconds(
+ Long minTimeBetweenTriggersSeconds) {
+ this.minTimeBetweenTriggersSeconds = minTimeBetweenTriggersSeconds;
+ return this;
+ }
+
+ public Long getMinTimeBetweenTriggersSeconds() {
+ return minTimeBetweenTriggersSeconds;
+ }
+
+ public ModelTriggerConfiguration setSecurableName(String securableName) {
+ this.securableName = securableName;
+ return this;
+ }
+
+ public String getSecurableName() {
+ return securableName;
+ }
+
+ public ModelTriggerConfiguration setWaitAfterLastChangeSeconds(Long waitAfterLastChangeSeconds) {
+ this.waitAfterLastChangeSeconds = waitAfterLastChangeSeconds;
+ return this;
+ }
+
+ public Long getWaitAfterLastChangeSeconds() {
+ return waitAfterLastChangeSeconds;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ModelTriggerConfiguration that = (ModelTriggerConfiguration) o;
+ return Objects.equals(aliases, that.aliases)
+ && Objects.equals(condition, that.condition)
+ && Objects.equals(minTimeBetweenTriggersSeconds, that.minTimeBetweenTriggersSeconds)
+ && Objects.equals(securableName, that.securableName)
+ && Objects.equals(waitAfterLastChangeSeconds, that.waitAfterLastChangeSeconds);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ aliases,
+ condition,
+ minTimeBetweenTriggersSeconds,
+ securableName,
+ waitAfterLastChangeSeconds);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(ModelTriggerConfiguration.class)
+ .add("aliases", aliases)
+ .add("condition", condition)
+ .add("minTimeBetweenTriggersSeconds", minTimeBetweenTriggersSeconds)
+ .add("securableName", securableName)
+ .add("waitAfterLastChangeSeconds", waitAfterLastChangeSeconds)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ModelTriggerConfigurationCondition.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ModelTriggerConfigurationCondition.java
new file mode 100755
index 000000000..263b649fa
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/ModelTriggerConfigurationCondition.java
@@ -0,0 +1,12 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.jobs;
+
+import com.databricks.sdk.support.Generated;
+
+@Generated
+public enum ModelTriggerConfigurationCondition {
+ MODEL_ALIAS_SET,
+ MODEL_CREATED,
+ MODEL_VERSION_READY,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TaskRetryMode.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TaskRetryMode.java
new file mode 100755
index 000000000..f6e8b6065
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TaskRetryMode.java
@@ -0,0 +1,17 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.jobs;
+
+import com.databricks.sdk.support.Generated;
+
+/**
+ * task retry mode of the continuous job * NEVER: The failed task will not be retried. * ON_FAILURE:
+ * Retry a failed task if at least one other task in the job is still running its first attempt.
+ * When this condition is no longer met or the retry limit is reached, the job run is cancelled and
+ * a new run is started.
+ */
+@Generated
+public enum TaskRetryMode {
+ NEVER,
+ ON_FAILURE,
+}
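A brief sketch of the new task retry mode on a continuous job, using existing jobs types (JobSettings, PauseStatus); the job name is illustrative:

    import com.databricks.sdk.service.jobs.Continuous;
    import com.databricks.sdk.service.jobs.JobSettings;
    import com.databricks.sdk.service.jobs.PauseStatus;
    import com.databricks.sdk.service.jobs.TaskRetryMode;

    public class ContinuousRetrySketch {
      public static void main(String[] args) {
        // Retry failed tasks while at least one other task is still on its first attempt.
        JobSettings settings =
            new JobSettings()
                .setName("continuous-ingest")
                .setContinuous(
                    new Continuous()
                        .setPauseStatus(PauseStatus.UNPAUSED)
                        .setTaskRetryMode(TaskRetryMode.ON_FAILURE));
        System.out.println(settings);
      }
    }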
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TriggerSettings.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TriggerSettings.java
index 7ee1fe4b1..0c5217593 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TriggerSettings.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/jobs/TriggerSettings.java
@@ -13,6 +13,10 @@ public class TriggerSettings {
@JsonProperty("file_arrival")
private FileArrivalTriggerConfiguration fileArrival;
+ /** */
+ @JsonProperty("model")
+ private ModelTriggerConfiguration model;
+
/** Whether this trigger is paused or not. */
@JsonProperty("pause_status")
private PauseStatus pauseStatus;
@@ -38,6 +42,15 @@ public FileArrivalTriggerConfiguration getFileArrival() {
return fileArrival;
}
+ public TriggerSettings setModel(ModelTriggerConfiguration model) {
+ this.model = model;
+ return this;
+ }
+
+ public ModelTriggerConfiguration getModel() {
+ return model;
+ }
+
public TriggerSettings setPauseStatus(PauseStatus pauseStatus) {
this.pauseStatus = pauseStatus;
return this;
@@ -80,6 +93,7 @@ public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) return false;
TriggerSettings that = (TriggerSettings) o;
return Objects.equals(fileArrival, that.fileArrival)
+ && Objects.equals(model, that.model)
&& Objects.equals(pauseStatus, that.pauseStatus)
&& Objects.equals(periodic, that.periodic)
&& Objects.equals(table, that.table)
@@ -88,13 +102,14 @@ public boolean equals(Object o) {
@Override
public int hashCode() {
- return Objects.hash(fileArrival, pauseStatus, periodic, table, tableUpdate);
+ return Objects.hash(fileArrival, model, pauseStatus, periodic, table, tableUpdate);
}
@Override
public String toString() {
return new ToStringer(TriggerSettings.class)
.add("fileArrival", fileArrival)
+ .add("model", model)
.add("pauseStatus", pauseStatus)
.add("periodic", periodic)
.add("table", table)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java
index e103caff1..cb0baa186 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsAPI.java
@@ -194,6 +194,11 @@ public GetLoggedModelResponse getLoggedModel(GetLoggedModelRequest request) {
return impl.getLoggedModel(request);
}
+ /** Batch endpoint for getting logged models from a list of model IDs */
+ public GetLoggedModelsRequestResponse getLoggedModels(GetLoggedModelsRequest request) {
+ return impl.getLoggedModels(request);
+ }
+
public GetExperimentPermissionLevelsResponse getPermissionLevels(String experimentId) {
return getPermissionLevels(
new GetExperimentPermissionLevelsRequest().setExperimentId(experimentId));
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java
index 0d39a660b..d3b844e34 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsImpl.java
@@ -208,6 +208,19 @@ public GetLoggedModelResponse getLoggedModel(GetLoggedModelRequest request) {
}
}
+ @Override
+ public GetLoggedModelsRequestResponse getLoggedModels(GetLoggedModelsRequest request) {
+ String path = "/api/2.0/mlflow/logged-models:batchGet";
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, GetLoggedModelsRequestResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
@Override
public GetExperimentPermissionLevelsResponse getPermissionLevels(
GetExperimentPermissionLevelsRequest request) {
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java
index 486c97d62..a2eba9962 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/ExperimentsService.java
@@ -89,6 +89,9 @@ FinalizeLoggedModelResponse finalizeLoggedModel(
/** Get a logged model. */
GetLoggedModelResponse getLoggedModel(GetLoggedModelRequest getLoggedModelRequest);
+ /** Batch endpoint for getting logged models from a list of model IDs */
+ GetLoggedModelsRequestResponse getLoggedModels(GetLoggedModelsRequest getLoggedModelsRequest);
+
/** Gets the permission levels that a user can have on an object. */
GetExperimentPermissionLevelsResponse getPermissionLevels(
GetExperimentPermissionLevelsRequest getExperimentPermissionLevelsRequest);
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLoggedModelsRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLoggedModelsRequest.java
new file mode 100755
index 000000000..c71186bd0
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLoggedModelsRequest.java
@@ -0,0 +1,45 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.ml;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class GetLoggedModelsRequest {
+ /** The IDs of the logged models to retrieve. Max threshold is 100. */
+ @JsonIgnore
+ @QueryParam("model_ids")
+ private Collection<String> modelIds;
+
+ public GetLoggedModelsRequest setModelIds(Collection<String> modelIds) {
+ this.modelIds = modelIds;
+ return this;
+ }
+
+ public Collection<String> getModelIds() {
+ return modelIds;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetLoggedModelsRequest that = (GetLoggedModelsRequest) o;
+ return Objects.equals(modelIds, that.modelIds);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(modelIds);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetLoggedModelsRequest.class).add("modelIds", modelIds).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLoggedModelsRequestResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLoggedModelsRequestResponse.java
new file mode 100755
index 000000000..a5469473e
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/ml/GetLoggedModelsRequestResponse.java
@@ -0,0 +1,43 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.ml;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class GetLoggedModelsRequestResponse {
+ /** The retrieved logged models. */
+ @JsonProperty("models")
+ private Collection<LoggedModel> models;
+
+ public GetLoggedModelsRequestResponse setModels(Collection<LoggedModel> models) {
+ this.models = models;
+ return this;
+ }
+
+ public Collection<LoggedModel> getModels() {
+ return models;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetLoggedModelsRequestResponse that = (GetLoggedModelsRequestResponse) o;
+ return Objects.equals(models, that.models);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(models);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetLoggedModelsRequestResponse.class).add("models", models).toString();
+ }
+}
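A minimal call sketch for the new batch endpoint through the Experiments API added above; the model IDs are placeholders:

    import com.databricks.sdk.WorkspaceClient;
    import com.databricks.sdk.service.ml.GetLoggedModelsRequest;
    import com.databricks.sdk.service.ml.GetLoggedModelsRequestResponse;
    import java.util.Arrays;

    public class GetLoggedModelsSketch {
      public static void main(String[] args) {
        WorkspaceClient w = new WorkspaceClient();
        // Batch-fetch up to 100 logged models by ID in a single request.
        GetLoggedModelsRequestResponse response =
            w.experiments()
                .getLoggedModels(
                    new GetLoggedModelsRequest()
                        .setModelIds(Arrays.asList("model-id-1", "model-id-2")));
        System.out.println(response.getModels());
      }
    }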
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionPipelineDefinition.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionPipelineDefinition.java
index 1471fd886..c513bf309 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionPipelineDefinition.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/IngestionPipelineDefinition.java
@@ -17,6 +17,14 @@ public class IngestionPipelineDefinition {
@JsonProperty("connection_name")
private String connectionName;
+ /**
+ * Immutable. If set to true, the pipeline will ingest tables from the UC foreign catalogs
+ * directly without the need to specify a UC connection or ingestion gateway. The `source_catalog`
+ * fields in objects of IngestionConfig are interpreted as the UC foreign catalogs to ingest from.
+ */
+ @JsonProperty("ingest_from_uc_foreign_catalog")
+ private Boolean ingestFromUcForeignCatalog;
+
/**
* Immutable. Identifier for the gateway that is used by this ingestion pipeline to communicate
* with the source database. This is used with connectors to databases like SQL Server.
@@ -54,6 +62,16 @@ public String getConnectionName() {
return connectionName;
}
+ public IngestionPipelineDefinition setIngestFromUcForeignCatalog(
+ Boolean ingestFromUcForeignCatalog) {
+ this.ingestFromUcForeignCatalog = ingestFromUcForeignCatalog;
+ return this;
+ }
+
+ public Boolean getIngestFromUcForeignCatalog() {
+ return ingestFromUcForeignCatalog;
+ }
+
public IngestionPipelineDefinition setIngestionGatewayId(String ingestionGatewayId) {
this.ingestionGatewayId = ingestionGatewayId;
return this;
@@ -96,6 +114,7 @@ public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) return false;
IngestionPipelineDefinition that = (IngestionPipelineDefinition) o;
return Objects.equals(connectionName, that.connectionName)
+ && Objects.equals(ingestFromUcForeignCatalog, that.ingestFromUcForeignCatalog)
&& Objects.equals(ingestionGatewayId, that.ingestionGatewayId)
&& Objects.equals(objects, that.objects)
&& Objects.equals(sourceType, that.sourceType)
@@ -105,13 +124,19 @@ public boolean equals(Object o) {
@Override
public int hashCode() {
return Objects.hash(
- connectionName, ingestionGatewayId, objects, sourceType, tableConfiguration);
+ connectionName,
+ ingestFromUcForeignCatalog,
+ ingestionGatewayId,
+ objects,
+ sourceType,
+ tableConfiguration);
}
@Override
public String toString() {
return new ToStringer(IngestionPipelineDefinition.class)
.add("connectionName", connectionName)
+ .add("ingestFromUcForeignCatalog", ingestFromUcForeignCatalog)
.add("ingestionGatewayId", ingestionGatewayId)
.add("objects", objects)
.add("sourceType", sourceType)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesAPI.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesAPI.java
index ad41edac4..278fd9ae1 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesAPI.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesAPI.java
@@ -191,6 +191,18 @@ public ListUpdatesResponse listUpdates(ListUpdatesRequest request) {
return impl.listUpdates(request);
}
+ public RestorePipelineRequestResponse restorePipeline(String pipelineId) {
+ return restorePipeline(new RestorePipelineRequest().setPipelineId(pipelineId));
+ }
+
+ /**
+ * * Restores a pipeline that was previously deleted, if within the restoration window. All tables
+ * deleted at pipeline deletion will be undropped as well.
+ */
+ public RestorePipelineRequestResponse restorePipeline(RestorePipelineRequest request) {
+ return impl.restorePipeline(request);
+ }
+
public PipelinePermissions setPermissions(String pipelineId) {
return setPermissions(new PipelinePermissionsRequest().setPipelineId(pipelineId));
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesImpl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesImpl.java
index 0fdad690a..739bb8c2e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesImpl.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesImpl.java
@@ -139,6 +139,19 @@ public ListUpdatesResponse listUpdates(ListUpdatesRequest request) {
}
}
+ @Override
+ public RestorePipelineRequestResponse restorePipeline(RestorePipelineRequest request) {
+ String path = String.format("/api/2.0/pipelines/%s/restore", request.getPipelineId());
+ try {
+ Request req = new Request("POST", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, RestorePipelineRequestResponse.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
@Override
public PipelinePermissions setPermissions(PipelinePermissionsRequest request) {
String path = String.format("/api/2.0/permissions/pipelines/%s", request.getPipelineId());
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesService.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesService.java
index d2d0a81c4..84a47f4f1 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesService.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/PipelinesService.java
@@ -61,6 +61,12 @@ ListPipelineEventsResponse listPipelineEvents(
/** List updates for an active pipeline. */
ListUpdatesResponse listUpdates(ListUpdatesRequest listUpdatesRequest);
+ /**
+ * * Restores a pipeline that was previously deleted, if within the restoration window. All tables
+ * deleted at pipeline deletion will be undropped as well.
+ */
+ RestorePipelineRequestResponse restorePipeline(RestorePipelineRequest restorePipelineRequest);
+
/**
* Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
* permissions if none are specified. Objects can inherit permissions from their root object.
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestorePipelineRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestorePipelineRequest.java
new file mode 100755
index 000000000..c9b91f8ae
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestorePipelineRequest.java
@@ -0,0 +1,41 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.pipelines;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class RestorePipelineRequest {
+ /** The ID of the pipeline to restore */
+ @JsonIgnore private String pipelineId;
+
+ public RestorePipelineRequest setPipelineId(String pipelineId) {
+ this.pipelineId = pipelineId;
+ return this;
+ }
+
+ public String getPipelineId() {
+ return pipelineId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ RestorePipelineRequest that = (RestorePipelineRequest) o;
+ return Objects.equals(pipelineId, that.pipelineId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(pipelineId);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(RestorePipelineRequest.class).add("pipelineId", pipelineId).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestorePipelineRequestResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestorePipelineRequestResponse.java
new file mode 100755
index 000000000..293d32256
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/RestorePipelineRequestResponse.java
@@ -0,0 +1,28 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.pipelines;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import java.util.Objects;
+
+@Generated
+public class RestorePipelineRequestResponse {
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash();
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(RestorePipelineRequestResponse.class).toString();
+ }
+}
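A one-call sketch of the new restore operation; the pipeline ID is a placeholder and must still be within the restoration window:

    import com.databricks.sdk.WorkspaceClient;

    public class RestorePipelineSketch {
      public static void main(String[] args) {
        WorkspaceClient w = new WorkspaceClient();
        // Restores the deleted pipeline and undrops the tables that were dropped with it.
        w.pipelines().restorePipeline("11111111-2222-3333-4444-555555555555");
      }
    }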
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpecificConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpecificConfig.java
index 9d9d711e0..adcfda43c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpecificConfig.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/pipelines/TableSpecificConfig.java
@@ -37,6 +37,13 @@ public class TableSpecificConfig {
private IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfig
queryBasedConnectorConfig;
+ /**
+ * (Optional, Immutable) The row filter condition to be applied to the table. It must not contain
+ * the WHERE keyword, only the actual filter condition. It must be in DBSQL format.
+ */
+ @JsonProperty("row_filter")
+ private String rowFilter;
+
/**
* If true, formula fields defined in the table are included in the ingestion. This setting is
* only valid for the Salesforce connector
@@ -94,6 +101,15 @@ public TableSpecificConfig setQueryBasedConnectorConfig(
return queryBasedConnectorConfig;
}
+ public TableSpecificConfig setRowFilter(String rowFilter) {
+ this.rowFilter = rowFilter;
+ return this;
+ }
+
+ public String getRowFilter() {
+ return rowFilter;
+ }
+
public TableSpecificConfig setSalesforceIncludeFormulaFields(
Boolean salesforceIncludeFormulaFields) {
this.salesforceIncludeFormulaFields = salesforceIncludeFormulaFields;
@@ -131,6 +147,7 @@ public boolean equals(Object o) {
&& Objects.equals(includeColumns, that.includeColumns)
&& Objects.equals(primaryKeys, that.primaryKeys)
&& Objects.equals(queryBasedConnectorConfig, that.queryBasedConnectorConfig)
+ && Objects.equals(rowFilter, that.rowFilter)
&& Objects.equals(salesforceIncludeFormulaFields, that.salesforceIncludeFormulaFields)
&& Objects.equals(scdType, that.scdType)
&& Objects.equals(sequenceBy, that.sequenceBy);
@@ -143,6 +160,7 @@ public int hashCode() {
includeColumns,
primaryKeys,
queryBasedConnectorConfig,
+ rowFilter,
salesforceIncludeFormulaFields,
scdType,
sequenceBy);
@@ -155,6 +173,7 @@ public String toString() {
.add("includeColumns", includeColumns)
.add("primaryKeys", primaryKeys)
.add("queryBasedConnectorConfig", queryBasedConnectorConfig)
+ .add("rowFilter", rowFilter)
.add("salesforceIncludeFormulaFields", salesforceIncludeFormulaFields)
.add("scdType", scdType)
.add("sequenceBy", sequenceBy)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionConfig.java
index 5b14da636..88cd606f3 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionConfig.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionConfig.java
@@ -9,6 +9,10 @@
@Generated
public class AnomalyDetectionConfig {
+ /** The type of the last run of the workflow. */
+ @JsonProperty("job_type")
+ private AnomalyDetectionJobType jobType;
+
/** Run id of the last run of the workflow */
@JsonProperty("last_run_id")
private String lastRunId;
@@ -17,6 +21,15 @@ public class AnomalyDetectionConfig {
@JsonProperty("latest_run_status")
private AnomalyDetectionRunStatus latestRunStatus;
+ public AnomalyDetectionConfig setJobType(AnomalyDetectionJobType jobType) {
+ this.jobType = jobType;
+ return this;
+ }
+
+ public AnomalyDetectionJobType getJobType() {
+ return jobType;
+ }
+
public AnomalyDetectionConfig setLastRunId(String lastRunId) {
this.lastRunId = lastRunId;
return this;
@@ -40,18 +53,20 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
AnomalyDetectionConfig that = (AnomalyDetectionConfig) o;
- return Objects.equals(lastRunId, that.lastRunId)
+ return Objects.equals(jobType, that.jobType)
+ && Objects.equals(lastRunId, that.lastRunId)
&& Objects.equals(latestRunStatus, that.latestRunStatus);
}
@Override
public int hashCode() {
- return Objects.hash(lastRunId, latestRunStatus);
+ return Objects.hash(jobType, lastRunId, latestRunStatus);
}
@Override
public String toString() {
return new ToStringer(AnomalyDetectionConfig.class)
+ .add("jobType", jobType)
.add("lastRunId", lastRunId)
.add("latestRunStatus", latestRunStatus)
.toString();
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionJobType.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionJobType.java
new file mode 100755
index 000000000..f54a53676
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/qualitymonitorv2/AnomalyDetectionJobType.java
@@ -0,0 +1,11 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.qualitymonitorv2;
+
+import com.databricks.sdk.support.Generated;
+
+@Generated
+public enum AnomalyDetectionJobType {
+ ANOMALY_DETECTION_JOB_TYPE_INTERNAL_HIDDEN,
+ ANOMALY_DETECTION_JOB_TYPE_NORMAL,
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayRateLimit.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayRateLimit.java
index c39679117..81ca442ba 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayRateLimit.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/serving/AiGatewayRateLimit.java
@@ -31,6 +31,10 @@ public class AiGatewayRateLimit {
@JsonProperty("renewal_period")
private AiGatewayRateLimitRenewalPeriod renewalPeriod;
+ /** Used to specify how many tokens are allowed for a key within the renewal_period. */
+ @JsonProperty("tokens")
+ private Long tokens;
+
public AiGatewayRateLimit setCalls(Long calls) {
this.calls = calls;
return this;
@@ -67,6 +71,15 @@ public AiGatewayRateLimitRenewalPeriod getRenewalPeriod() {
return renewalPeriod;
}
+ public AiGatewayRateLimit setTokens(Long tokens) {
+ this.tokens = tokens;
+ return this;
+ }
+
+ public Long getTokens() {
+ return tokens;
+ }
+
@Override
public boolean equals(Object o) {
if (this == o) return true;
@@ -75,12 +88,13 @@ public boolean equals(Object o) {
return Objects.equals(calls, that.calls)
&& Objects.equals(key, that.key)
&& Objects.equals(principal, that.principal)
- && Objects.equals(renewalPeriod, that.renewalPeriod);
+ && Objects.equals(renewalPeriod, that.renewalPeriod)
+ && Objects.equals(tokens, that.tokens);
}
@Override
public int hashCode() {
- return Objects.hash(calls, key, principal, renewalPeriod);
+ return Objects.hash(calls, key, principal, renewalPeriod, tokens);
}
@Override
@@ -90,6 +104,7 @@ public String toString() {
.add("key", key)
.add("principal", principal)
.add("renewalPeriod", renewalPeriod)
+ .add("tokens", tokens)
.toString();
}
}
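A sketch of a token-based limit alongside the existing call-based one; the key and renewal-period enums already exist in this package, and the numeric limits are placeholders:

    import com.databricks.sdk.service.serving.AiGatewayRateLimit;
    import com.databricks.sdk.service.serving.AiGatewayRateLimitKey;
    import com.databricks.sdk.service.serving.AiGatewayRateLimitRenewalPeriod;

    public class TokenRateLimitSketch {
      public static void main(String[] args) {
        // Per-user limit: at most 100 calls and 200,000 tokens per minute.
        AiGatewayRateLimit limit =
            new AiGatewayRateLimit()
                .setKey(AiGatewayRateLimitKey.USER)
                .setRenewalPeriod(AiGatewayRateLimitRenewalPeriod.MINUTE)
                .setCalls(100L)
                .setTokens(200000L);
        System.out.println(limit);
      }
    }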
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/AccountSettingsV2API.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/AccountSettingsV2API.java
new file mode 100755
index 000000000..93742e30d
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/AccountSettingsV2API.java
@@ -0,0 +1,48 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.settingsv2;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.support.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/** APIs to manage account level settings */
+@Generated
+public class AccountSettingsV2API {
+ private static final Logger LOG = LoggerFactory.getLogger(AccountSettingsV2API.class);
+
+ private final AccountSettingsV2Service impl;
+
+ /** Regular-use constructor */
+ public AccountSettingsV2API(ApiClient apiClient) {
+ impl = new AccountSettingsV2Impl(apiClient);
+ }
+
+ /** Constructor for mocks */
+ public AccountSettingsV2API(AccountSettingsV2Service mock) {
+ impl = mock;
+ }
+
+ public Setting getPublicAccountSetting(String name) {
+ return getPublicAccountSetting(new GetPublicAccountSettingRequest().setName(name));
+ }
+
+ /** Get a setting value at account level */
+ public Setting getPublicAccountSetting(GetPublicAccountSettingRequest request) {
+ return impl.getPublicAccountSetting(request);
+ }
+
+ public Setting patchPublicAccountSetting(String name, Setting setting) {
+ return patchPublicAccountSetting(
+ new PatchPublicAccountSettingRequest().setName(name).setSetting(setting));
+ }
+
+ /** Patch a setting value at account level */
+ public Setting patchPublicAccountSetting(PatchPublicAccountSettingRequest request) {
+ return impl.patchPublicAccountSetting(request);
+ }
+
+ public AccountSettingsV2Service impl() {
+ return impl;
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/AccountSettingsV2Impl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/AccountSettingsV2Impl.java
new file mode 100755
index 000000000..c1bc637fe
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/AccountSettingsV2Impl.java
@@ -0,0 +1,49 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.settingsv2;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
+import com.databricks.sdk.support.Generated;
+import java.io.IOException;
+
+/** Package-local implementation of AccountSettingsV2 */
+@Generated
+class AccountSettingsV2Impl implements AccountSettingsV2Service {
+ private final ApiClient apiClient;
+
+ public AccountSettingsV2Impl(ApiClient apiClient) {
+ this.apiClient = apiClient;
+ }
+
+ @Override
+ public Setting getPublicAccountSetting(GetPublicAccountSettingRequest request) {
+ String path =
+ String.format(
+ "/api/2.1/accounts/%s/settings/%s", apiClient.configuredAccountID(), request.getName());
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, Setting.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public Setting patchPublicAccountSetting(PatchPublicAccountSettingRequest request) {
+ String path =
+ String.format(
+ "/api/2.1/accounts/%s/settings/%s", apiClient.configuredAccountID(), request.getName());
+ try {
+ Request req = new Request("PATCH", path, apiClient.serialize(request.getSetting()));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, Setting.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/AccountSettingsV2Service.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/AccountSettingsV2Service.java
new file mode 100755
index 000000000..3e217c511
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/AccountSettingsV2Service.java
@@ -0,0 +1,21 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.settingsv2;
+
+import com.databricks.sdk.support.Generated;
+
+/**
+ * APIs to manage account level settings
+ *
+ * <p>This is the high-level interface, that contains generated methods.
+ *
+ * <p>Evolving: this interface is under development. Method signatures may change.
+ */
+@Generated
+public interface AccountSettingsV2Service {
+ /** Get a setting value at account level */
+ Setting getPublicAccountSetting(GetPublicAccountSettingRequest getPublicAccountSettingRequest);
+
+ /** Patch a setting value at account level */
+ Setting patchPublicAccountSetting(
+ PatchPublicAccountSettingRequest patchPublicAccountSettingRequest);
+}
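A get-then-patch sketch for the new account settings surface. The API is built directly from the account client's ApiClient because this hunk does not show an AccountClient accessor; a.apiClient() and the setting name are assumptions for illustration:

    import com.databricks.sdk.AccountClient;
    import com.databricks.sdk.service.settingsv2.AccountSettingsV2API;
    import com.databricks.sdk.service.settingsv2.BooleanMessage;
    import com.databricks.sdk.service.settingsv2.Setting;

    public class AccountSettingSketch {
      public static void main(String[] args) {
        AccountClient a = new AccountClient();
        // Assumed: the underlying ApiClient is reachable via a.apiClient().
        AccountSettingsV2API settings = new AccountSettingsV2API(a.apiClient());
        // Read the current value, then patch it to a boolean true.
        Setting current = settings.getPublicAccountSetting("example_setting_name");
        Setting updated =
            settings.patchPublicAccountSetting(
                "example_setting_name",
                new Setting()
                    .setName("example_setting_name")
                    .setBooleanVal(new BooleanMessage().setValue(true)));
        System.out.println(current + " -> " + updated);
      }
    }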
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/BooleanMessage.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/BooleanMessage.java
new file mode 100755
index 000000000..7e387bc54
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/BooleanMessage.java
@@ -0,0 +1,42 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settingsv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class BooleanMessage {
+ /** */
+ @JsonProperty("value")
+ private Boolean value;
+
+ public BooleanMessage setValue(Boolean value) {
+ this.value = value;
+ return this;
+ }
+
+ public Boolean getValue() {
+ return value;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ BooleanMessage that = (BooleanMessage) o;
+ return Objects.equals(value, that.value);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(value);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(BooleanMessage.class).add("value", value).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/GetPublicAccountSettingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/GetPublicAccountSettingRequest.java
new file mode 100755
index 000000000..aeb8ae373
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/GetPublicAccountSettingRequest.java
@@ -0,0 +1,41 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settingsv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class GetPublicAccountSettingRequest {
+ /** */
+ @JsonIgnore private String name;
+
+ public GetPublicAccountSettingRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetPublicAccountSettingRequest that = (GetPublicAccountSettingRequest) o;
+ return Objects.equals(name, that.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetPublicAccountSettingRequest.class).add("name", name).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/GetPublicWorkspaceSettingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/GetPublicWorkspaceSettingRequest.java
new file mode 100755
index 000000000..928c553e6
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/GetPublicWorkspaceSettingRequest.java
@@ -0,0 +1,41 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settingsv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import java.util.Objects;
+
+@Generated
+public class GetPublicWorkspaceSettingRequest {
+ /** */
+ @JsonIgnore private String name;
+
+ public GetPublicWorkspaceSettingRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GetPublicWorkspaceSettingRequest that = (GetPublicWorkspaceSettingRequest) o;
+ return Objects.equals(name, that.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(GetPublicWorkspaceSettingRequest.class).add("name", name).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/IntegerMessage.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/IntegerMessage.java
new file mode 100755
index 000000000..007047d0e
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/IntegerMessage.java
@@ -0,0 +1,42 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settingsv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class IntegerMessage {
+ /** */
+ @JsonProperty("value")
+ private Long value;
+
+ public IntegerMessage setValue(Long value) {
+ this.value = value;
+ return this;
+ }
+
+ public Long getValue() {
+ return value;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ IntegerMessage that = (IntegerMessage) o;
+ return Objects.equals(value, that.value);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(value);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(IntegerMessage.class).add("value", value).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/PatchPublicAccountSettingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/PatchPublicAccountSettingRequest.java
new file mode 100755
index 000000000..3a0a7aa63
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/PatchPublicAccountSettingRequest.java
@@ -0,0 +1,58 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settingsv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class PatchPublicAccountSettingRequest {
+ /** */
+ @JsonIgnore private String name;
+
+ /** */
+ @JsonProperty("setting")
+ private Setting setting;
+
+ public PatchPublicAccountSettingRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public PatchPublicAccountSettingRequest setSetting(Setting setting) {
+ this.setting = setting;
+ return this;
+ }
+
+ public Setting getSetting() {
+ return setting;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ PatchPublicAccountSettingRequest that = (PatchPublicAccountSettingRequest) o;
+ return Objects.equals(name, that.name) && Objects.equals(setting, that.setting);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name, setting);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(PatchPublicAccountSettingRequest.class)
+ .add("name", name)
+ .add("setting", setting)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/PatchPublicWorkspaceSettingRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/PatchPublicWorkspaceSettingRequest.java
new file mode 100755
index 000000000..595a9704b
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/PatchPublicWorkspaceSettingRequest.java
@@ -0,0 +1,58 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settingsv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class PatchPublicWorkspaceSettingRequest {
+ /** */
+ @JsonIgnore private String name;
+
+ /** */
+ @JsonProperty("setting")
+ private Setting setting;
+
+ public PatchPublicWorkspaceSettingRequest setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public PatchPublicWorkspaceSettingRequest setSetting(Setting setting) {
+ this.setting = setting;
+ return this;
+ }
+
+ public Setting getSetting() {
+ return setting;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ PatchPublicWorkspaceSettingRequest that = (PatchPublicWorkspaceSettingRequest) o;
+ return Objects.equals(name, that.name) && Objects.equals(setting, that.setting);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name, setting);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(PatchPublicWorkspaceSettingRequest.class)
+ .add("name", name)
+ .add("setting", setting)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/Setting.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/Setting.java
new file mode 100755
index 000000000..b51ca2286
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/Setting.java
@@ -0,0 +1,141 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settingsv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class Setting {
+ /** */
+ @JsonProperty("boolean_val")
+ private BooleanMessage booleanVal;
+
+ /** */
+ @JsonProperty("effective_boolean_val")
+ private BooleanMessage effectiveBooleanVal;
+
+ /** */
+ @JsonProperty("effective_integer_val")
+ private IntegerMessage effectiveIntegerVal;
+
+ /** */
+ @JsonProperty("effective_string_val")
+ private StringMessage effectiveStringVal;
+
+ /** */
+ @JsonProperty("integer_val")
+ private IntegerMessage integerVal;
+
+ /** Name of the setting. */
+ @JsonProperty("name")
+ private String name;
+
+ /** */
+ @JsonProperty("string_val")
+ private StringMessage stringVal;
+
+ public Setting setBooleanVal(BooleanMessage booleanVal) {
+ this.booleanVal = booleanVal;
+ return this;
+ }
+
+ public BooleanMessage getBooleanVal() {
+ return booleanVal;
+ }
+
+ public Setting setEffectiveBooleanVal(BooleanMessage effectiveBooleanVal) {
+ this.effectiveBooleanVal = effectiveBooleanVal;
+ return this;
+ }
+
+ public BooleanMessage getEffectiveBooleanVal() {
+ return effectiveBooleanVal;
+ }
+
+ public Setting setEffectiveIntegerVal(IntegerMessage effectiveIntegerVal) {
+ this.effectiveIntegerVal = effectiveIntegerVal;
+ return this;
+ }
+
+ public IntegerMessage getEffectiveIntegerVal() {
+ return effectiveIntegerVal;
+ }
+
+ public Setting setEffectiveStringVal(StringMessage effectiveStringVal) {
+ this.effectiveStringVal = effectiveStringVal;
+ return this;
+ }
+
+ public StringMessage getEffectiveStringVal() {
+ return effectiveStringVal;
+ }
+
+ public Setting setIntegerVal(IntegerMessage integerVal) {
+ this.integerVal = integerVal;
+ return this;
+ }
+
+ public IntegerMessage getIntegerVal() {
+ return integerVal;
+ }
+
+ public Setting setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public Setting setStringVal(StringMessage stringVal) {
+ this.stringVal = stringVal;
+ return this;
+ }
+
+ public StringMessage getStringVal() {
+ return stringVal;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ Setting that = (Setting) o;
+ return Objects.equals(booleanVal, that.booleanVal)
+ && Objects.equals(effectiveBooleanVal, that.effectiveBooleanVal)
+ && Objects.equals(effectiveIntegerVal, that.effectiveIntegerVal)
+ && Objects.equals(effectiveStringVal, that.effectiveStringVal)
+ && Objects.equals(integerVal, that.integerVal)
+ && Objects.equals(name, that.name)
+ && Objects.equals(stringVal, that.stringVal);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(
+ booleanVal,
+ effectiveBooleanVal,
+ effectiveIntegerVal,
+ effectiveStringVal,
+ integerVal,
+ name,
+ stringVal);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(Setting.class)
+ .add("booleanVal", booleanVal)
+ .add("effectiveBooleanVal", effectiveBooleanVal)
+ .add("effectiveIntegerVal", effectiveIntegerVal)
+ .add("effectiveStringVal", effectiveStringVal)
+ .add("integerVal", integerVal)
+ .add("name", name)
+ .add("stringVal", stringVal)
+ .toString();
+ }
+}
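
For orientation, a minimal sketch of how a Setting value could be assembled with the typed message wrappers generated in this change (BooleanMessage, IntegerMessage, StringMessage); the setting name below is a placeholder, not a key defined by this PR:

    // Hypothetical example: a boolean-valued setting. "example_boolean_setting"
    // is a placeholder name, not a real setting key.
    Setting enableFeature =
        new Setting()
            .setName("example_boolean_setting")
            .setBooleanVal(new BooleanMessage().setValue(true));
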
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/StringMessage.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/StringMessage.java
new file mode 100755
index 000000000..09db23e57
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/StringMessage.java
@@ -0,0 +1,42 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.settingsv2;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class StringMessage {
+ /** Represents a generic string value. */
+ @JsonProperty("value")
+ private String value;
+
+ public StringMessage setValue(String value) {
+ this.value = value;
+ return this;
+ }
+
+ public String getValue() {
+ return value;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ StringMessage that = (StringMessage) o;
+ return Objects.equals(value, that.value);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(value);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(StringMessage.class).add("value", value).toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/WorkspaceSettingsV2API.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/WorkspaceSettingsV2API.java
new file mode 100755
index 000000000..a5cace024
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/WorkspaceSettingsV2API.java
@@ -0,0 +1,48 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.settingsv2;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.support.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/** APIs to manage workspace level settings */
+@Generated
+public class WorkspaceSettingsV2API {
+ private static final Logger LOG = LoggerFactory.getLogger(WorkspaceSettingsV2API.class);
+
+ private final WorkspaceSettingsV2Service impl;
+
+ /** Regular-use constructor */
+ public WorkspaceSettingsV2API(ApiClient apiClient) {
+ impl = new WorkspaceSettingsV2Impl(apiClient);
+ }
+
+ /** Constructor for mocks */
+ public WorkspaceSettingsV2API(WorkspaceSettingsV2Service mock) {
+ impl = mock;
+ }
+
+ public Setting getPublicWorkspaceSetting(String name) {
+ return getPublicWorkspaceSetting(new GetPublicWorkspaceSettingRequest().setName(name));
+ }
+
+ /** Get a setting value at workspace level */
+ public Setting getPublicWorkspaceSetting(GetPublicWorkspaceSettingRequest request) {
+ return impl.getPublicWorkspaceSetting(request);
+ }
+
+ public Setting patchPublicWorkspaceSetting(String name, Setting setting) {
+ return patchPublicWorkspaceSetting(
+ new PatchPublicWorkspaceSettingRequest().setName(name).setSetting(setting));
+ }
+
+ /** Patch a setting value at workspace level */
+ public Setting patchPublicWorkspaceSetting(PatchPublicWorkspaceSettingRequest request) {
+ return impl.patchPublicWorkspaceSetting(request);
+ }
+
+ public WorkspaceSettingsV2Service impl() {
+ return impl;
+ }
+}
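
A hedged usage sketch for the new workspace-level settings API: it assumes an already-configured com.databricks.sdk.core.ApiClient (how WorkspaceClient exposes this service is not shown in this diff), and the setting name is again a placeholder:

    // Sketch only; apiClient is assumed to exist and be configured elsewhere.
    WorkspaceSettingsV2API settings = new WorkspaceSettingsV2API(apiClient);

    // Read the current value of a workspace setting.
    Setting current = settings.getPublicWorkspaceSetting("example_boolean_setting");

    // Patch the same setting with a new boolean value.
    Setting updated =
        settings.patchPublicWorkspaceSetting(
            "example_boolean_setting",
            new Setting()
                .setName("example_boolean_setting")
                .setBooleanVal(new BooleanMessage().setValue(false)));
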
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/WorkspaceSettingsV2Impl.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/WorkspaceSettingsV2Impl.java
new file mode 100755
index 000000000..00cc78173
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/WorkspaceSettingsV2Impl.java
@@ -0,0 +1,45 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.settingsv2;
+
+import com.databricks.sdk.core.ApiClient;
+import com.databricks.sdk.core.DatabricksException;
+import com.databricks.sdk.core.http.Request;
+import com.databricks.sdk.support.Generated;
+import java.io.IOException;
+
+/** Package-local implementation of WorkspaceSettingsV2 */
+@Generated
+class WorkspaceSettingsV2Impl implements WorkspaceSettingsV2Service {
+ private final ApiClient apiClient;
+
+ public WorkspaceSettingsV2Impl(ApiClient apiClient) {
+ this.apiClient = apiClient;
+ }
+
+ @Override
+ public Setting getPublicWorkspaceSetting(GetPublicWorkspaceSettingRequest request) {
+ String path = String.format("/api/2.1/settings/%s", request.getName());
+ try {
+ Request req = new Request("GET", path);
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ return apiClient.execute(req, Setting.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+
+ @Override
+ public Setting patchPublicWorkspaceSetting(PatchPublicWorkspaceSettingRequest request) {
+ String path = String.format("/api/2.1/settings/%s", request.getName());
+ try {
+ Request req = new Request("PATCH", path, apiClient.serialize(request.getSetting()));
+ ApiClient.setQuery(req, request);
+ req.withHeader("Accept", "application/json");
+ req.withHeader("Content-Type", "application/json");
+ return apiClient.execute(req, Setting.class);
+ } catch (IOException e) {
+ throw new DatabricksException("IO error: " + e.getMessage(), e);
+ }
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/WorkspaceSettingsV2Service.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/WorkspaceSettingsV2Service.java
new file mode 100755
index 000000000..1ed66fd13
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/settingsv2/WorkspaceSettingsV2Service.java
@@ -0,0 +1,22 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+package com.databricks.sdk.service.settingsv2;
+
+import com.databricks.sdk.support.Generated;
+
+/**
+ * APIs to manage workspace level settings
+ *
+ * <p>This is the high-level interface, that contains generated methods.
+ *
+ * <p>Evolving: this interface is under development. Method signatures may change.
+ */
+@Generated
+public interface WorkspaceSettingsV2Service {
+ /** Get a setting value at workspace level */
+ Setting getPublicWorkspaceSetting(
+ GetPublicWorkspaceSettingRequest getPublicWorkspaceSettingRequest);
+
+ /** Patch a setting value at workspace level */
+ Setting patchPublicWorkspaceSetting(
+ PatchPublicWorkspaceSettingRequest patchPublicWorkspaceSettingRequest);
+}
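
Because the API class also accepts the service interface directly (the "Constructor for mocks" above), tests can stub it without a mocking framework. A minimal sketch, assuming the generated request classes expose getName()/getSetting() accessors as the other generated types in this diff do:

    // Illustrative test double: return a canned Setting for any request.
    WorkspaceSettingsV2Service stub =
        new WorkspaceSettingsV2Service() {
          @Override
          public Setting getPublicWorkspaceSetting(GetPublicWorkspaceSettingRequest request) {
            return new Setting().setName(request.getName());
          }

          @Override
          public Setting patchPublicWorkspaceSetting(PatchPublicWorkspaceSettingRequest request) {
            return request.getSetting();
          }
        };
    WorkspaceSettingsV2API api = new WorkspaceSettingsV2API(stub);
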
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PermissionsChange.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PermissionsChange.java
index 4467ec7fd..89a7ccfdc 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PermissionsChange.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PermissionsChange.java
@@ -21,6 +21,18 @@ public class PermissionsChange {
@JsonProperty("principal")
private String principal;
+ /**
+ * An opaque internal ID that identifies the principal whose privileges should be removed.
+ *
+ * <p>This field is intended for removing privileges associated with a deleted user. When set,
+ * only the entries specified in the remove field are processed; any entries in the add field will
+ * be rejected.
+ *
+ * <p>Only one of principal or principal_id should be specified, never both at the same time.
+ */
+ @JsonProperty("principal_id")
+ private Long principalId;
+
/** The set of privileges to remove. */
@JsonProperty("remove")
private Collection remove;
@@ -43,6 +55,15 @@ public String getPrincipal() {
return principal;
}
+ public PermissionsChange setPrincipalId(Long principalId) {
+ this.principalId = principalId;
+ return this;
+ }
+
+ public Long getPrincipalId() {
+ return principalId;
+ }
+
public PermissionsChange setRemove(Collection remove) {
this.remove = remove;
return this;
@@ -59,12 +80,13 @@ public boolean equals(Object o) {
PermissionsChange that = (PermissionsChange) o;
return Objects.equals(add, that.add)
&& Objects.equals(principal, that.principal)
+ && Objects.equals(principalId, that.principalId)
&& Objects.equals(remove, that.remove);
}
@Override
public int hashCode() {
- return Objects.hash(add, principal, remove);
+ return Objects.hash(add, principal, principalId, remove);
}
@Override
@@ -72,6 +94,7 @@ public String toString() {
return new ToStringer(PermissionsChange.class)
.add("add", add)
.add("principal", principal)
+ .add("principalId", principalId)
.add("remove", remove)
.toString();
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PrivilegeAssignment.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PrivilegeAssignment.java
index c3e2fcfae..365e263de 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PrivilegeAssignment.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/sharing/PrivilegeAssignment.java
@@ -17,6 +17,13 @@ public class PrivilegeAssignment {
@JsonProperty("principal")
private String principal;
+ /**
+ * Unique identifier of the principal. For active principals, both `principal` and `principal_id`
+ * are present.
+ */
+ @JsonProperty("principal_id")
+ private Long principalId;
+
/** The privileges assigned to the principal. */
@JsonProperty("privileges")
private Collection privileges;
@@ -30,6 +37,15 @@ public String getPrincipal() {
return principal;
}
+ public PrivilegeAssignment setPrincipalId(Long principalId) {
+ this.principalId = principalId;
+ return this;
+ }
+
+ public Long getPrincipalId() {
+ return principalId;
+ }
+
public PrivilegeAssignment setPrivileges(Collection privileges) {
this.privileges = privileges;
return this;
@@ -44,18 +60,21 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
PrivilegeAssignment that = (PrivilegeAssignment) o;
- return Objects.equals(principal, that.principal) && Objects.equals(privileges, that.privileges);
+ return Objects.equals(principal, that.principal)
+ && Objects.equals(principalId, that.principalId)
+ && Objects.equals(privileges, that.privileges);
}
@Override
public int hashCode() {
- return Objects.hash(principal, privileges);
+ return Objects.hash(principal, principalId, privileges);
}
@Override
public String toString() {
return new ToStringer(PrivilegeAssignment.class)
.add("principal", principal)
+ .add("principalId", principalId)
.add("privileges", privileges)
.toString();
}
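
Together, the two new principal_id fields support cleaning up grants left behind by deleted principals. A hedged sketch of how they might be combined, where the PrivilegeAssignment is assumed to come from a prior get-permissions call:

    // Sketch only: strip every privilege still held by a principal that no longer
    // exists, addressing it by its opaque principal_id rather than by name.
    static PermissionsChange revokeAllFor(PrivilegeAssignment assignment) {
      return new PermissionsChange()
          .setPrincipalId(assignment.getPrincipalId())
          .setRemove(assignment.getPrivileges());
    }
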
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeltaSyncVectorIndexSpecRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeltaSyncVectorIndexSpecRequest.java
index e893e2b55..bf197a27c 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeltaSyncVectorIndexSpecRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeltaSyncVectorIndexSpecRequest.java
@@ -18,6 +18,10 @@ public class DeltaSyncVectorIndexSpecRequest {
@JsonProperty("columns_to_sync")
private Collection columnsToSync;
+ /** The budget policy id applied to the vector search index */
+ @JsonProperty("effective_budget_policy_id")
+ private String effectiveBudgetPolicyId;
+
/** The columns that contain the embedding source. */
@JsonProperty("embedding_source_columns")
private Collection embeddingSourceColumns;
@@ -56,6 +60,16 @@ public Collection getColumnsToSync() {
return columnsToSync;
}
+ public DeltaSyncVectorIndexSpecRequest setEffectiveBudgetPolicyId(
+ String effectiveBudgetPolicyId) {
+ this.effectiveBudgetPolicyId = effectiveBudgetPolicyId;
+ return this;
+ }
+
+ public String getEffectiveBudgetPolicyId() {
+ return effectiveBudgetPolicyId;
+ }
+
public DeltaSyncVectorIndexSpecRequest setEmbeddingSourceColumns(
Collection embeddingSourceColumns) {
this.embeddingSourceColumns = embeddingSourceColumns;
@@ -110,6 +124,7 @@ public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) return false;
DeltaSyncVectorIndexSpecRequest that = (DeltaSyncVectorIndexSpecRequest) o;
return Objects.equals(columnsToSync, that.columnsToSync)
+ && Objects.equals(effectiveBudgetPolicyId, that.effectiveBudgetPolicyId)
&& Objects.equals(embeddingSourceColumns, that.embeddingSourceColumns)
&& Objects.equals(embeddingVectorColumns, that.embeddingVectorColumns)
&& Objects.equals(embeddingWritebackTable, that.embeddingWritebackTable)
@@ -121,6 +136,7 @@ public boolean equals(Object o) {
public int hashCode() {
return Objects.hash(
columnsToSync,
+ effectiveBudgetPolicyId,
embeddingSourceColumns,
embeddingVectorColumns,
embeddingWritebackTable,
@@ -132,6 +148,7 @@ public int hashCode() {
public String toString() {
return new ToStringer(DeltaSyncVectorIndexSpecRequest.class)
.add("columnsToSync", columnsToSync)
+ .add("effectiveBudgetPolicyId", effectiveBudgetPolicyId)
.add("embeddingSourceColumns", embeddingSourceColumns)
.add("embeddingVectorColumns", embeddingVectorColumns)
.add("embeddingWritebackTable", embeddingWritebackTable)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeltaSyncVectorIndexSpecResponse.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeltaSyncVectorIndexSpecResponse.java
index 9f2f17700..edd06e52b 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeltaSyncVectorIndexSpecResponse.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/DeltaSyncVectorIndexSpecResponse.java
@@ -10,6 +10,10 @@
@Generated
public class DeltaSyncVectorIndexSpecResponse {
+ /** The budget policy id applied to the vector search index */
+ @JsonProperty("effective_budget_policy_id")
+ private String effectiveBudgetPolicyId;
+
/** The columns that contain the embedding source. */
@JsonProperty("embedding_source_columns")
private Collection embeddingSourceColumns;
@@ -43,6 +47,16 @@ public class DeltaSyncVectorIndexSpecResponse {
@JsonProperty("source_table")
private String sourceTable;
+ public DeltaSyncVectorIndexSpecResponse setEffectiveBudgetPolicyId(
+ String effectiveBudgetPolicyId) {
+ this.effectiveBudgetPolicyId = effectiveBudgetPolicyId;
+ return this;
+ }
+
+ public String getEffectiveBudgetPolicyId() {
+ return effectiveBudgetPolicyId;
+ }
+
public DeltaSyncVectorIndexSpecResponse setEmbeddingSourceColumns(
Collection embeddingSourceColumns) {
this.embeddingSourceColumns = embeddingSourceColumns;
@@ -105,7 +119,8 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
DeltaSyncVectorIndexSpecResponse that = (DeltaSyncVectorIndexSpecResponse) o;
- return Objects.equals(embeddingSourceColumns, that.embeddingSourceColumns)
+ return Objects.equals(effectiveBudgetPolicyId, that.effectiveBudgetPolicyId)
+ && Objects.equals(embeddingSourceColumns, that.embeddingSourceColumns)
&& Objects.equals(embeddingVectorColumns, that.embeddingVectorColumns)
&& Objects.equals(embeddingWritebackTable, that.embeddingWritebackTable)
&& Objects.equals(pipelineId, that.pipelineId)
@@ -116,6 +131,7 @@ public boolean equals(Object o) {
@Override
public int hashCode() {
return Objects.hash(
+ effectiveBudgetPolicyId,
embeddingSourceColumns,
embeddingVectorColumns,
embeddingWritebackTable,
@@ -127,6 +143,7 @@ public int hashCode() {
@Override
public String toString() {
return new ToStringer(DeltaSyncVectorIndexSpecResponse.class)
+ .add("effectiveBudgetPolicyId", effectiveBudgetPolicyId)
.add("embeddingSourceColumns", embeddingSourceColumns)
.add("embeddingVectorColumns", embeddingVectorColumns)
.add("embeddingWritebackTable", embeddingWritebackTable)
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/GetIndexRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/GetIndexRequest.java
index 53a6c4cd5..0390811cc 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/GetIndexRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/GetIndexRequest.java
@@ -3,15 +3,35 @@
package com.databricks.sdk.service.vectorsearch;
import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.QueryParam;
import com.databricks.sdk.support.ToStringer;
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.util.Objects;
@Generated
public class GetIndexRequest {
+ /**
+ * If true, the URL returned for the index is guaranteed to be compatible with the reranker.
+ * Currently this means we return the CP URL regardless of how the index is being accessed. If not
+ * set or set to false, the URL may still be compatible with the reranker depending on what URL we
+ * return.
+ */
+ @JsonIgnore
+ @QueryParam("ensure_reranker_compatible")
+ private Boolean ensureRerankerCompatible;
+
/** Name of the index */
@JsonIgnore private String indexName;
+ public GetIndexRequest setEnsureRerankerCompatible(Boolean ensureRerankerCompatible) {
+ this.ensureRerankerCompatible = ensureRerankerCompatible;
+ return this;
+ }
+
+ public Boolean getEnsureRerankerCompatible() {
+ return ensureRerankerCompatible;
+ }
+
public GetIndexRequest setIndexName(String indexName) {
this.indexName = indexName;
return this;
@@ -26,16 +46,20 @@ public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
GetIndexRequest that = (GetIndexRequest) o;
- return Objects.equals(indexName, that.indexName);
+ return Objects.equals(ensureRerankerCompatible, that.ensureRerankerCompatible)
+ && Objects.equals(indexName, that.indexName);
}
@Override
public int hashCode() {
- return Objects.hash(indexName);
+ return Objects.hash(ensureRerankerCompatible, indexName);
}
@Override
public String toString() {
- return new ToStringer(GetIndexRequest.class).add("indexName", indexName).toString();
+ return new ToStringer(GetIndexRequest.class)
+ .add("ensureRerankerCompatible", ensureRerankerCompatible)
+ .add("indexName", indexName)
+ .toString();
}
}
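
A brief sketch of the new query parameter in use; the index name is a placeholder, and the request would then be passed to the existing getIndex entry point of the vector search indexes API:

    // Hypothetical index name; ensure_reranker_compatible is sent as a query parameter.
    GetIndexRequest request =
        new GetIndexRequest()
            .setIndexName("main.default.docs_index")
            .setEnsureRerankerCompatible(true);
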
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/QueryVectorIndexRequest.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/QueryVectorIndexRequest.java
index e07f748b7..bdc74194e 100755
--- a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/QueryVectorIndexRequest.java
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/QueryVectorIndexRequest.java
@@ -53,6 +53,10 @@ public class QueryVectorIndexRequest {
@JsonProperty("query_vector")
private Collection queryVector;
+ /** */
+ @JsonProperty("reranker")
+ private RerankerConfig reranker;
+
/** Threshold for the approximate nearest neighbor search. Defaults to 0.0. */
@JsonProperty("score_threshold")
private Double scoreThreshold;
@@ -129,6 +133,15 @@ public Collection getQueryVector() {
return queryVector;
}
+ public QueryVectorIndexRequest setReranker(RerankerConfig reranker) {
+ this.reranker = reranker;
+ return this;
+ }
+
+ public RerankerConfig getReranker() {
+ return reranker;
+ }
+
public QueryVectorIndexRequest setScoreThreshold(Double scoreThreshold) {
this.scoreThreshold = scoreThreshold;
return this;
@@ -151,6 +164,7 @@ public boolean equals(Object o) {
&& Objects.equals(queryText, that.queryText)
&& Objects.equals(queryType, that.queryType)
&& Objects.equals(queryVector, that.queryVector)
+ && Objects.equals(reranker, that.reranker)
&& Objects.equals(scoreThreshold, that.scoreThreshold);
}
@@ -165,6 +179,7 @@ public int hashCode() {
queryText,
queryType,
queryVector,
+ reranker,
scoreThreshold);
}
@@ -179,6 +194,7 @@ public String toString() {
.add("queryText", queryText)
.add("queryType", queryType)
.add("queryVector", queryVector)
+ .add("reranker", reranker)
.add("scoreThreshold", scoreThreshold)
.toString();
}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/RerankerConfig.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/RerankerConfig.java
new file mode 100755
index 000000000..1aa0089d9
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/RerankerConfig.java
@@ -0,0 +1,58 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.vectorsearch;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Objects;
+
+@Generated
+public class RerankerConfig {
+ /** */
+ @JsonProperty("model")
+ private String model;
+
+ /** */
+ @JsonProperty("parameters")
+ private RerankerConfigRerankerParameters parameters;
+
+ public RerankerConfig setModel(String model) {
+ this.model = model;
+ return this;
+ }
+
+ public String getModel() {
+ return model;
+ }
+
+ public RerankerConfig setParameters(RerankerConfigRerankerParameters parameters) {
+ this.parameters = parameters;
+ return this;
+ }
+
+ public RerankerConfigRerankerParameters getParameters() {
+ return parameters;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ RerankerConfig that = (RerankerConfig) o;
+ return Objects.equals(model, that.model) && Objects.equals(parameters, that.parameters);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(model, parameters);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(RerankerConfig.class)
+ .add("model", model)
+ .add("parameters", parameters)
+ .toString();
+ }
+}
diff --git a/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/RerankerConfigRerankerParameters.java b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/RerankerConfigRerankerParameters.java
new file mode 100755
index 000000000..cf8184596
--- /dev/null
+++ b/databricks-sdk-java/src/main/java/com/databricks/sdk/service/vectorsearch/RerankerConfigRerankerParameters.java
@@ -0,0 +1,45 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package com.databricks.sdk.service.vectorsearch;
+
+import com.databricks.sdk.support.Generated;
+import com.databricks.sdk.support.ToStringer;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Collection;
+import java.util.Objects;
+
+@Generated
+public class RerankerConfigRerankerParameters {
+ /** */
+ @JsonProperty("columns_to_rerank")
+ private Collection<String> columnsToRerank;
+
+ public RerankerConfigRerankerParameters setColumnsToRerank(Collection<String> columnsToRerank) {
+ this.columnsToRerank = columnsToRerank;
+ return this;
+ }
+
+ public Collection<String> getColumnsToRerank() {
+ return columnsToRerank;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ RerankerConfigRerankerParameters that = (RerankerConfigRerankerParameters) o;
+ return Objects.equals(columnsToRerank, that.columnsToRerank);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(columnsToRerank);
+ }
+
+ @Override
+ public String toString() {
+ return new ToStringer(RerankerConfigRerankerParameters.class)
+ .add("columnsToRerank", columnsToRerank)
+ .toString();
+ }
+}
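
Finally, a hedged sketch of wiring the new reranker configuration into a vector index query. The query text, model name, and column names are illustrative placeholders, and setQueryText is assumed from the existing queryText field on QueryVectorIndexRequest:

    // Sketch only: none of these literal values come from this PR.
    QueryVectorIndexRequest query =
        new QueryVectorIndexRequest()
            .setQueryText("how do I rotate my credentials?")
            .setReranker(
                new RerankerConfig()
                    .setModel("example-reranker-model")
                    .setParameters(
                        new RerankerConfigRerankerParameters()
                            .setColumnsToRerank(java.util.Arrays.asList("title", "chunk_text"))));
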